Mirror of https://github.com/home-assistant/core.git, synced 2025-11-18 15:30:10 +00:00

Compare commits: context-tr...dev_target (33 commits)

Commit SHAs:
6041894b41, bd07f74cf8, 128ff4004c, 50afba3958, 14088a67f2, 212c8f2688,
e29b9026ab, 0c8dda1956, edf82db057, 37644511f6, 3685d0f7c2, 3dabfeb329,
8e7d2d7108, 2fe4a1164b, 05175294f6, e2ddfb8782, f1cc133ff6, 0cf97cf577,
38cea2e5f0, 71876d5b34, 0f780254e1, 9e40972b11, 07ef61dd8d, 1bf6771a54,
e7a7cb829e, 6f6b2f1ad3, 1cc4890f75, d3dd9b26c9, a64d61df05, e7c6c5311d,
72a524c868, b437113f31, e0e263d3b5
.github/workflows/ci.yaml (vendored, 4 changed lines)

@@ -37,7 +37,7 @@ on:
        type: boolean

env:
  CACHE_VERSION: 2
  CACHE_VERSION: 1
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.12"
@@ -622,7 +622,7 @@ jobs:
    steps:
      - *checkout
      - name: Dependency review
        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
        uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
        with:
          license-check: false # We use our own license audit checks
CODEOWNERS (generated, 4 changed lines)

@@ -1017,8 +1017,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
/tests/components/music_assistant/ @music-assistant @arturpragacz
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
@@ -96,5 +96,13 @@
    "turn_on": {
      "service": "mdi:power-on"
    }
  },
  "triggers": {
    "started_heating": {
      "trigger": "mdi:fire"
    },
    "turned_off": {
      "trigger": "mdi:power-off"
    }
  }
}
@@ -187,6 +187,13 @@
        "heat_cool": "Heat/cool",
        "off": "[%key:common::state::off%]"
      }
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",
        "first": "First",
        "last": "Last"
      }
    }
  },
  "services": {
@@ -285,5 +292,29 @@
      "name": "[%key:common::action::turn_on%]"
    }
  },
  "title": "Climate"
  "title": "Climate",
  "triggers": {
    "started_heating": {
      "description": "Triggers when a climate starts to heat.",
      "description_configured": "[%key:component::climate::triggers::started_heating::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::triggers::turned_off::fields::behavior::description%]",
          "name": "[%key:component::climate::triggers::turned_off::fields::behavior::name%]"
        }
      },
      "name": "When a climate starts to heat"
    },
    "turned_off": {
      "description": "Triggers when a climate is turned off.",
      "description_configured": "[%key:component::climate::triggers::turned_off::description%]",
      "fields": {
        "behavior": {
          "description": "The behavior of the targeted climates to trigger on.",
          "name": "Behavior"
        }
      },
      "name": "When a climate is turned off"
    }
  }
}
homeassistant/components/climate/trigger.py (new file, 23 lines)

@@ -0,0 +1,23 @@
"""Provides triggers for climates."""

from homeassistant.const import STATE_OFF
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
    Trigger,
    make_entity_state_attribute_trigger,
    make_entity_state_trigger,
)

from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction

TRIGGERS: dict[str, type[Trigger]] = {
    "turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
    "started_heating": make_entity_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
    ),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for climates."""
    return TRIGGERS
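For orientation, the mapping above is the whole trigger platform: the framework asks the integration for its trigger classes via `async_get_triggers` and instantiates the class registered under the requested key. A minimal sketch of that lookup, assuming the module path shown above; the helper itself is hypothetical and not part of the change:

```python
# Hypothetical helper for illustration only; not part of Home Assistant.
from homeassistant.components.climate.trigger import async_get_triggers
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger


async def resolve_climate_trigger(hass: HomeAssistant, key: str) -> type[Trigger]:
    """Return the Trigger class registered for a key such as 'turned_off'."""
    triggers = await async_get_triggers(hass)
    return triggers[key]
```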
homeassistant/components/climate/triggers.yaml (new file, 19 lines)

@@ -0,0 +1,19 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: climate
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          translation_key: trigger_behavior
          options:
            - first
            - last
            - any

started_heating: *trigger_common

turned_off: *trigger_common
@@ -37,6 +37,13 @@ USER_SCHEMA = vol.Schema(
    }
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT): cv.port,
        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
    }
)


async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -168,55 +175,36 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle reconfiguration of the device."""
        reconfigure_entry = self._get_reconfigure_entry()
        if not user_input:
            return self.async_show_form(
                step_id="reconfigure", data_schema=STEP_RECONFIGURE
            )

        updated_host = user_input[CONF_HOST]

        self._async_abort_entries_match({CONF_HOST: updated_host})

        errors: dict[str, str] = {}

        if user_input is not None:
            updated_host = user_input[CONF_HOST]

            self._async_abort_entries_match({CONF_HOST: updated_host})

            try:
                data_to_validate = {
                    CONF_HOST: updated_host,
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_PIN: user_input[CONF_PIN],
                    CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
                }
                await validate_input(self.hass, data_to_validate)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except InvalidPin:
                errors["base"] = "invalid_pin"
            except Exception:  # noqa: BLE001
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                data_updates = {
                    CONF_HOST: updated_host,
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_PIN: user_input[CONF_PIN],
                }
                return self.async_update_reload_and_abort(
                    reconfigure_entry, data_updates=data_updates
                )

        schema = vol.Schema(
            {
                vol.Required(
                    CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
                ): cv.string,
                vol.Required(
                    CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
                ): cv.port,
                vol.Optional(CONF_PIN): cv.string,
            }
        )
        try:
            await validate_input(self.hass, user_input)
        except CannotConnect:
            errors["base"] = "cannot_connect"
        except InvalidAuth:
            errors["base"] = "invalid_auth"
        except InvalidPin:
            errors["base"] = "invalid_pin"
        except Exception:  # noqa: BLE001
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            return self.async_update_reload_and_abort(
                reconfigure_entry, data_updates={CONF_HOST: updated_host}
            )

        return self.async_show_form(
            step_id="reconfigure",
            data_schema=schema,
            data_schema=STEP_RECONFIGURE,
            errors=errors,
        )
@@ -4,7 +4,7 @@ from __future__ import annotations

from collections.abc import Callable
import logging
from typing import Any, Literal
from typing import Literal

from hassil.recognize import RecognizeResult
import voluptuous as vol
@@ -21,7 +21,6 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass

@@ -53,8 +52,6 @@ from .const import (
    DATA_COMPONENT,
    DOMAIN,
    HOME_ASSISTANT_AGENT,
    METADATA_CUSTOM_FILE,
    METADATA_CUSTOM_SENTENCE,
    SERVICE_PROCESS,
    SERVICE_RELOAD,
    ConversationEntityFeature,
@@ -269,13 +266,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
    hass.data[DATA_COMPONENT] = entity_component

    manager = get_agent_manager(hass)

    hass_config_path = hass.config.path()
    config_intents = _get_config_intents(config, hass_config_path)
    manager.update_config_intents(config_intents)

    await async_setup_default_agent(hass, entity_component)
    agent_config = config.get(DOMAIN, {})
    await async_setup_default_agent(
        hass, entity_component, config_intents=agent_config.get("intents", {})
    )

    async def handle_process(service: ServiceCall) -> ServiceResponse:
        """Parse text into commands."""
@@ -300,16 +294,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    async def handle_reload(service: ServiceCall) -> None:
        """Reload intents."""
        language = service.data.get(ATTR_LANGUAGE)
        if language is None:
            conf = await async_integration_yaml_config(hass, DOMAIN)
            if conf is not None:
                config_intents = _get_config_intents(conf, hass_config_path)
                manager.update_config_intents(config_intents)

        agent = manager.default_agent
        agent = get_agent_manager(hass).default_agent
        if agent is not None:
            await agent.async_reload(language=language)
            await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))

    hass.services.async_register(
        DOMAIN,
@@ -326,27 +313,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    return True


def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
    """Return config intents."""
    intents = config.get(DOMAIN, {}).get("intents", {})
    return {
        "intents": {
            intent_name: {
                "data": [
                    {
                        "sentences": sentences,
                        "metadata": {
                            METADATA_CUSTOM_SENTENCE: True,
                            METADATA_CUSTOM_FILE: hass_config_path,
                        },
                    }
                ]
            }
            for intent_name, sentences in intents.items()
        }
    }


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
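The `_get_config_intents` helper (present on one side of this hunk) converts the `conversation:` section of configuration.yaml into the metadata-tagged structure the default agent merges into its intent dictionary; the other side moves the equivalent logic into `default_agent.py` (see below). A hedged illustration of that shape, with an invented intent name, sentence, and config path:

```python
# All values here are made up for illustration; only the structure matters.
config = {"conversation": {"intents": {"TellJoke": ["Tell me a joke"]}}}
hass_config_path = "/config"

expected = {
    "intents": {
        "TellJoke": {
            "data": [
                {
                    "sentences": ["Tell me a joke"],
                    "metadata": {
                        "hass_custom_sentence": True,  # METADATA_CUSTOM_SENTENCE
                        "hass_custom_file": "/config",  # METADATA_CUSTOM_FILE
                    },
                }
            ]
        }
    }
}
```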
@@ -147,7 +147,6 @@ class AgentManager:
        self.hass = hass
        self._agents: dict[str, AbstractConversationAgent] = {}
        self.default_agent: DefaultAgent | None = None
        self.config_intents: dict[str, Any] = {}
        self.triggers_details: list[TriggerDetails] = []

    @callback
@@ -200,16 +199,9 @@
    async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
        """Set up the default agent."""
        agent.update_config_intents(self.config_intents)
        agent.update_triggers(self.triggers_details)
        self.default_agent = agent

    def update_config_intents(self, intents: dict[str, Any]) -> None:
        """Update config intents."""
        self.config_intents = intents
        if self.default_agent is not None:
            self.default_agent.update_config_intents(intents)

    def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
        """Register a trigger."""
        self.triggers_details.append(trigger_details)
@@ -30,7 +30,3 @@ class ConversationEntityFeature(IntFlag):
    """Supported features of the conversation entity."""

    CONTROL = 1


METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
@@ -77,12 +77,7 @@ from homeassistant.util.json import JsonObjectType, json_loads_object

from .agent_manager import get_agent_manager
from .chat_log import AssistantContent, ChatLog
from .const import (
    DOMAIN,
    METADATA_CUSTOM_FILE,
    METADATA_CUSTOM_SENTENCE,
    ConversationEntityFeature,
)
from .const import DOMAIN, ConversationEntityFeature
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -96,6 +91,8 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]

_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}

METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"

ERROR_SENTINEL = object()
@@ -205,9 +202,10 @@ class IntentCache:
async def async_setup_default_agent(
    hass: HomeAssistant,
    entity_component: EntityComponent[ConversationEntity],
    config_intents: dict[str, Any],
) -> None:
    """Set up entity registry listener for the default agent."""
    agent = DefaultAgent(hass)
    agent = DefaultAgent(hass, config_intents)
    await entity_component.async_add_entities([agent])
    await get_agent_manager(hass).async_setup_default_agent(agent)

@@ -232,14 +230,14 @@ class DefaultAgent(ConversationEntity):
    _attr_name = "Home Assistant"
    _attr_supported_features = ConversationEntityFeature.CONTROL

    def __init__(self, hass: HomeAssistant) -> None:
    def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
        """Initialize the default agent."""
        self.hass = hass
        self._lang_intents: dict[str, LanguageIntents | object] = {}
        self._load_intents_lock = asyncio.Lock()

        # Intents from common conversation config
        self._config_intents: dict[str, Any] = {}
        # intent -> [sentences]
        self._config_intents: dict[str, Any] = config_intents

        # Sentences that will trigger a callback (skipping intent recognition)
        self._triggers_details: list[TriggerDetails] = []
@@ -1037,14 +1035,6 @@ class DefaultAgent(ConversationEntity):
        # Intents have changed, so we must clear the cache
        self._intent_cache.clear()

    @callback
    def update_config_intents(self, intents: dict[str, Any]) -> None:
        """Update config intents."""
        self._config_intents = intents

        # Intents have changed, so we must clear the cache
        self._intent_cache.clear()

    async def async_prepare(self, language: str | None = None) -> None:
        """Load intents for a language."""
        if language is None:
@@ -1169,10 +1159,33 @@
                custom_sentences_path,
            )

        merge_dict(
            intents_dict,
            self._config_intents,
        )
        # Load sentences from HA config for default language only
        if self._config_intents and (
            self.hass.config.language in (language, language_variant)
        ):
            hass_config_path = self.hass.config.path()
            merge_dict(
                intents_dict,
                {
                    "intents": {
                        intent_name: {
                            "data": [
                                {
                                    "sentences": sentences,
                                    "metadata": {
                                        METADATA_CUSTOM_SENTENCE: True,
                                        METADATA_CUSTOM_FILE: hass_config_path,
                                    },
                                }
                            ]
                        }
                        for intent_name, sentences in self._config_intents.items()
                    }
                },
            )
            _LOGGER.debug(
                "Loaded intents from configuration.yaml",
            )

        if not intents_dict:
            return None
@@ -108,5 +108,10 @@
    "toggle_cover_tilt": {
      "service": "mdi:arrow-top-right-bottom-left"
    }
  },
  "triggers": {
    "garage_opened": {
      "trigger": "mdi:garage-open"
    }
  }
}
@@ -82,6 +82,15 @@
        "name": "Window"
      }
    },
    "selector": {
      "trigger_behavior": {
        "options": {
          "any": "Any",
          "first": "First",
          "last": "Last"
        }
      }
    },
    "services": {
      "close_cover": {
        "description": "Closes a cover.",
@@ -136,5 +145,22 @@
      "name": "Toggle tilt"
    }
  },
  "title": "Cover"
  "title": "Cover",
  "triggers": {
    "garage_opened": {
      "description": "Triggers when a garage door opens.",
      "description_configured": "[%key:component::cover::triggers::garage_opened::description%]",
      "fields": {
        "behavior": {
          "description": "The behavior of the targeted garage doors to trigger on.",
          "name": "Behavior"
        },
        "fully_opened": {
          "description": "Require the garage doors to be fully opened before triggering.",
          "name": "Fully opened"
        }
      },
      "name": "When a garage door opens"
    }
  }
}
homeassistant/components/cover/trigger.py (new file, 108 lines)

@@ -0,0 +1,108 @@
"""Provides triggers for covers."""

from typing import Final

import voluptuous as vol

from homeassistant.const import CONF_OPTIONS
from homeassistant.core import HomeAssistant, State
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import get_device_class
from homeassistant.helpers.trigger import (
    ENTITY_STATE_TRIGGER_SCHEMA,
    EntityTriggerBase,
    Trigger,
    TriggerConfig,
)
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
from .const import DOMAIN

ATTR_FULLY_OPENED: Final = "fully_opened"

COVER_OPENED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(ATTR_FULLY_OPENED, default=False): bool,
        },
    }
)


def get_device_class_or_undefined(
    hass: HomeAssistant, entity_id: str
) -> str | None | UndefinedType:
    """Get the device class of an entity or UNDEFINED if not found."""
    try:
        return get_device_class(hass, entity_id)
    except HomeAssistantError:
        return UNDEFINED


class CoverOpenedClosedTrigger(EntityTriggerBase):
    """Class for cover opened and closed triggers."""

    _attribute: str = ATTR_CURRENT_POSITION
    _attribute_value: int | None = None
    _device_class: CoverDeviceClass | None
    _domain: str = DOMAIN
    _to_states: set[str]

    def is_to_state(self, state: State) -> bool:
        """Check if the state matches the target state."""
        if state.state not in self._to_states:
            return False
        if (
            self._attribute_value is not None
            and (value := state.attributes.get(self._attribute)) is not None
            and value != self._attribute_value
        ):
            return False
        return True

    def entity_filter(self, entities: set[str]) -> set[str]:
        """Filter entities of this domain."""
        entities = super().entity_filter(entities)
        return {
            entity_id
            for entity_id in entities
            if get_device_class_or_undefined(self._hass, entity_id)
            == self._device_class
        }


class CoverOpenedTrigger(CoverOpenedClosedTrigger):
    """Class for cover opened triggers."""

    _schema = COVER_OPENED_TRIGGER_SCHEMA
    _to_states = {CoverState.OPEN, CoverState.OPENING}

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)
        if self._options.get(ATTR_FULLY_OPENED):
            self._attribute_value = 100


def make_cover_opened_trigger(
    device_class: CoverDeviceClass | None,
) -> type[CoverOpenedTrigger]:
    """Create an entity state attribute trigger class."""

    class CustomTrigger(CoverOpenedTrigger):
        """Trigger for entity state changes."""

        _device_class = device_class

    return CustomTrigger


TRIGGERS: dict[str, type[Trigger]] = {
    "garage_opened": make_cover_opened_trigger(CoverDeviceClass.GARAGE),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for covers."""
    return TRIGGERS
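The `make_cover_opened_trigger` factory binds a device class into a reusable trigger class, which is how `garage_opened` is produced above. A hypothetical extension, assuming the same factory were reused for another cover device class; this is not part of the change:

```python
# Hypothetical example only: an "awning_opened" trigger built the same way.
from homeassistant.components.cover import CoverDeviceClass
from homeassistant.components.cover.trigger import TRIGGERS, make_cover_opened_trigger

TRIGGERS["awning_opened"] = make_cover_opened_trigger(CoverDeviceClass.AWNING)
```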
homeassistant/components/cover/triggers.yaml (new file, 21 lines)

@@ -0,0 +1,21 @@
garage_opened:
  target:
    entity:
      domain: cover
      device_class: garage
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          translation_key: trigger_behavior
          options:
            - first
            - last
            - any
    fully_opened:
      required: true
      default: false
      selector:
        boolean:
@@ -9,7 +9,7 @@
  },
  "iot_class": "cloud_polling",
  "loggers": ["pyecobee"],
  "requirements": ["python-ecobee-api==0.3.2"],
  "requirements": ["python-ecobee-api==0.2.20"],
  "single_config_entry": true,
  "zeroconf": [
    {
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from collections import Counter
|
||||
from collections.abc import Awaitable, Callable
|
||||
from typing import Literal, NotRequired, TypedDict
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -29,7 +29,7 @@ async def async_get_manager(hass: HomeAssistant) -> EnergyManager:
|
||||
class FlowFromGridSourceType(TypedDict):
|
||||
"""Dictionary describing the 'from' stat for the grid source."""
|
||||
|
||||
# statistic_id of an energy meter (kWh)
|
||||
# statistic_id of a an energy meter (kWh)
|
||||
stat_energy_from: str
|
||||
|
||||
# statistic_id of costs ($) incurred from the energy meter
|
||||
@@ -58,14 +58,6 @@ class FlowToGridSourceType(TypedDict):
|
||||
number_energy_price: float | None # Price for energy ($/kWh)
|
||||
|
||||
|
||||
class GridPowerSourceType(TypedDict):
|
||||
"""Dictionary holding the source of grid power consumption."""
|
||||
|
||||
# statistic_id of a power meter (kW)
|
||||
# negative values indicate grid return
|
||||
stat_rate: str
|
||||
|
||||
|
||||
class GridSourceType(TypedDict):
|
||||
"""Dictionary holding the source of grid energy consumption."""
|
||||
|
||||
@@ -73,7 +65,6 @@ class GridSourceType(TypedDict):
|
||||
|
||||
flow_from: list[FlowFromGridSourceType]
|
||||
flow_to: list[FlowToGridSourceType]
|
||||
power: NotRequired[list[GridPowerSourceType]]
|
||||
|
||||
cost_adjustment_day: float
|
||||
|
||||
@@ -84,7 +75,6 @@ class SolarSourceType(TypedDict):
|
||||
type: Literal["solar"]
|
||||
|
||||
stat_energy_from: str
|
||||
stat_rate: NotRequired[str]
|
||||
config_entry_solar_forecast: list[str] | None
|
||||
|
||||
|
||||
@@ -95,8 +85,6 @@ class BatterySourceType(TypedDict):
|
||||
|
||||
stat_energy_from: str
|
||||
stat_energy_to: str
|
||||
# positive when discharging, negative when charging
|
||||
stat_rate: NotRequired[str]
|
||||
|
||||
|
||||
class GasSourceType(TypedDict):
|
||||
@@ -148,15 +136,12 @@ class DeviceConsumption(TypedDict):
|
||||
# This is an ever increasing value
|
||||
stat_consumption: str
|
||||
|
||||
# Instantaneous rate of flow: W, L/min or m³/h
|
||||
stat_rate: NotRequired[str]
|
||||
|
||||
# An optional custom name for display in energy graphs
|
||||
name: str | None
|
||||
|
||||
# An optional statistic_id identifying a device
|
||||
# that includes this device's consumption in its total
|
||||
included_in_stat: NotRequired[str]
|
||||
included_in_stat: str | None
|
||||
|
||||
|
||||
class EnergyPreferences(TypedDict):
|
||||
@@ -209,12 +194,6 @@ FLOW_TO_GRID_SOURCE_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
GRID_POWER_SOURCE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("stat_rate"): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _generate_unique_value_validator(key: str) -> Callable[[list[dict]], list[dict]]:
|
||||
"""Generate a validator that ensures a value is only used once."""
|
||||
@@ -245,10 +224,6 @@ GRID_SOURCE_SCHEMA = vol.Schema(
|
||||
[FLOW_TO_GRID_SOURCE_SCHEMA],
|
||||
_generate_unique_value_validator("stat_energy_to"),
|
||||
),
|
||||
vol.Optional("power"): vol.All(
|
||||
[GRID_POWER_SOURCE_SCHEMA],
|
||||
_generate_unique_value_validator("stat_rate"),
|
||||
),
|
||||
vol.Required("cost_adjustment_day"): vol.Coerce(float),
|
||||
}
|
||||
)
|
||||
@@ -256,7 +231,6 @@ SOLAR_SOURCE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("type"): "solar",
|
||||
vol.Required("stat_energy_from"): str,
|
||||
vol.Optional("stat_rate"): str,
|
||||
vol.Optional("config_entry_solar_forecast"): vol.Any([str], None),
|
||||
}
|
||||
)
|
||||
@@ -265,7 +239,6 @@ BATTERY_SOURCE_SCHEMA = vol.Schema(
|
||||
vol.Required("type"): "battery",
|
||||
vol.Required("stat_energy_from"): str,
|
||||
vol.Required("stat_energy_to"): str,
|
||||
vol.Optional("stat_rate"): str,
|
||||
}
|
||||
)
|
||||
GAS_SOURCE_SCHEMA = vol.Schema(
|
||||
@@ -321,7 +294,6 @@ ENERGY_SOURCE_SCHEMA = vol.All(
|
||||
DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("stat_consumption"): str,
|
||||
vol.Optional("stat_rate"): str,
|
||||
vol.Optional("name"): str,
|
||||
vol.Optional("included_in_stat"): str,
|
||||
}
|
||||
|
||||
@@ -12,7 +12,6 @@ from homeassistant.const import (
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
UnitOfEnergy,
|
||||
UnitOfPower,
|
||||
UnitOfVolume,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback, valid_entity_id
|
||||
@@ -24,17 +23,12 @@ ENERGY_USAGE_DEVICE_CLASSES = (sensor.SensorDeviceClass.ENERGY,)
|
||||
ENERGY_USAGE_UNITS: dict[str, tuple[UnitOfEnergy, ...]] = {
|
||||
sensor.SensorDeviceClass.ENERGY: tuple(UnitOfEnergy)
|
||||
}
|
||||
POWER_USAGE_DEVICE_CLASSES = (sensor.SensorDeviceClass.POWER,)
|
||||
POWER_USAGE_UNITS: dict[str, tuple[UnitOfPower, ...]] = {
|
||||
sensor.SensorDeviceClass.POWER: tuple(UnitOfPower)
|
||||
}
|
||||
|
||||
ENERGY_PRICE_UNITS = tuple(
|
||||
f"/{unit}" for units in ENERGY_USAGE_UNITS.values() for unit in units
|
||||
)
|
||||
ENERGY_UNIT_ERROR = "entity_unexpected_unit_energy"
|
||||
ENERGY_PRICE_UNIT_ERROR = "entity_unexpected_unit_energy_price"
|
||||
POWER_UNIT_ERROR = "entity_unexpected_unit_power"
|
||||
GAS_USAGE_DEVICE_CLASSES = (
|
||||
sensor.SensorDeviceClass.ENERGY,
|
||||
sensor.SensorDeviceClass.GAS,
|
||||
@@ -88,10 +82,6 @@ def _get_placeholders(hass: HomeAssistant, issue_type: str) -> dict[str, str] |
|
||||
f"{currency}{unit}" for unit in ENERGY_PRICE_UNITS
|
||||
),
|
||||
}
|
||||
if issue_type == POWER_UNIT_ERROR:
|
||||
return {
|
||||
"power_units": ", ".join(POWER_USAGE_UNITS[sensor.SensorDeviceClass.POWER]),
|
||||
}
|
||||
if issue_type == GAS_UNIT_ERROR:
|
||||
return {
|
||||
"energy_units": ", ".join(GAS_USAGE_UNITS[sensor.SensorDeviceClass.ENERGY]),
|
||||
@@ -169,7 +159,7 @@ class EnergyPreferencesValidation:
|
||||
|
||||
|
||||
@callback
|
||||
def _async_validate_stat_common(
|
||||
def _async_validate_usage_stat(
|
||||
hass: HomeAssistant,
|
||||
metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
|
||||
stat_id: str,
|
||||
@@ -177,41 +167,37 @@ def _async_validate_stat_common(
|
||||
allowed_units: Mapping[str, Sequence[str]],
|
||||
unit_error: str,
|
||||
issues: ValidationIssues,
|
||||
check_negative: bool = False,
|
||||
) -> str | None:
|
||||
"""Validate common aspects of a statistic.
|
||||
|
||||
Returns the entity_id if validation succeeds, None otherwise.
|
||||
"""
|
||||
) -> None:
|
||||
"""Validate a statistic."""
|
||||
if stat_id not in metadata:
|
||||
issues.add_issue(hass, "statistics_not_defined", stat_id)
|
||||
|
||||
has_entity_source = valid_entity_id(stat_id)
|
||||
|
||||
if not has_entity_source:
|
||||
return None
|
||||
return
|
||||
|
||||
entity_id = stat_id
|
||||
|
||||
if not recorder.is_entity_recorded(hass, entity_id):
|
||||
issues.add_issue(hass, "recorder_untracked", entity_id)
|
||||
return None
|
||||
return
|
||||
|
||||
if (state := hass.states.get(entity_id)) is None:
|
||||
issues.add_issue(hass, "entity_not_defined", entity_id)
|
||||
return None
|
||||
return
|
||||
|
||||
if state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
|
||||
issues.add_issue(hass, "entity_unavailable", entity_id, state.state)
|
||||
return None
|
||||
return
|
||||
|
||||
try:
|
||||
current_value: float | None = float(state.state)
|
||||
except ValueError:
|
||||
issues.add_issue(hass, "entity_state_non_numeric", entity_id, state.state)
|
||||
return None
|
||||
return
|
||||
|
||||
if check_negative and current_value is not None and current_value < 0:
|
||||
if current_value is not None and current_value < 0:
|
||||
issues.add_issue(hass, "entity_negative_state", entity_id, current_value)
|
||||
|
||||
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
|
||||
@@ -225,36 +211,6 @@ def _async_validate_stat_common(
|
||||
if device_class and unit not in allowed_units.get(device_class, []):
|
||||
issues.add_issue(hass, unit_error, entity_id, unit)
|
||||
|
||||
return entity_id
|
||||
|
||||
|
||||
@callback
|
||||
def _async_validate_usage_stat(
|
||||
hass: HomeAssistant,
|
||||
metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
|
||||
stat_id: str,
|
||||
allowed_device_classes: Sequence[str],
|
||||
allowed_units: Mapping[str, Sequence[str]],
|
||||
unit_error: str,
|
||||
issues: ValidationIssues,
|
||||
) -> None:
|
||||
"""Validate a statistic."""
|
||||
entity_id = _async_validate_stat_common(
|
||||
hass,
|
||||
metadata,
|
||||
stat_id,
|
||||
allowed_device_classes,
|
||||
allowed_units,
|
||||
unit_error,
|
||||
issues,
|
||||
check_negative=True,
|
||||
)
|
||||
|
||||
if entity_id is None:
|
||||
return
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state is not None
|
||||
state_class = state.attributes.get(sensor.ATTR_STATE_CLASS)
|
||||
|
||||
allowed_state_classes = [
|
||||
@@ -299,39 +255,6 @@ def _async_validate_price_entity(
|
||||
issues.add_issue(hass, unit_error, entity_id, unit)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_validate_power_stat(
|
||||
hass: HomeAssistant,
|
||||
metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
|
||||
stat_id: str,
|
||||
allowed_device_classes: Sequence[str],
|
||||
allowed_units: Mapping[str, Sequence[str]],
|
||||
unit_error: str,
|
||||
issues: ValidationIssues,
|
||||
) -> None:
|
||||
"""Validate a power statistic."""
|
||||
entity_id = _async_validate_stat_common(
|
||||
hass,
|
||||
metadata,
|
||||
stat_id,
|
||||
allowed_device_classes,
|
||||
allowed_units,
|
||||
unit_error,
|
||||
issues,
|
||||
check_negative=False,
|
||||
)
|
||||
|
||||
if entity_id is None:
|
||||
return
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state is not None
|
||||
state_class = state.attributes.get(sensor.ATTR_STATE_CLASS)
|
||||
|
||||
if state_class != sensor.SensorStateClass.MEASUREMENT:
|
||||
issues.add_issue(hass, "entity_unexpected_state_class", entity_id, state_class)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_validate_cost_stat(
|
||||
hass: HomeAssistant,
|
||||
@@ -386,260 +309,11 @@ def _async_validate_auto_generated_cost_entity(
|
||||
issues.add_issue(hass, "recorder_untracked", cost_entity_id)
|
||||
|
||||
|
||||
def _validate_grid_source(
|
||||
hass: HomeAssistant,
|
||||
source: data.GridSourceType,
|
||||
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
|
||||
wanted_statistics_metadata: set[str],
|
||||
source_result: ValidationIssues,
|
||||
validate_calls: list[functools.partial[None]],
|
||||
) -> None:
|
||||
"""Validate grid energy source."""
|
||||
flow_from: data.FlowFromGridSourceType
|
||||
for flow_from in source["flow_from"]:
|
||||
wanted_statistics_metadata.add(flow_from["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
flow_from["stat_energy_from"],
|
||||
ENERGY_USAGE_DEVICE_CLASSES,
|
||||
ENERGY_USAGE_UNITS,
|
||||
ENERGY_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := flow_from.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_cost,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := flow_from.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
ENERGY_PRICE_UNITS,
|
||||
ENERGY_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
flow_from.get("entity_energy_price") is not None
|
||||
or flow_from.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
flow_from["stat_energy_from"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
flow_to: data.FlowToGridSourceType
|
||||
for flow_to in source["flow_to"]:
|
||||
wanted_statistics_metadata.add(flow_to["stat_energy_to"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
flow_to["stat_energy_to"],
|
||||
ENERGY_USAGE_DEVICE_CLASSES,
|
||||
ENERGY_USAGE_UNITS,
|
||||
ENERGY_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_compensation := flow_to.get("stat_compensation")) is not None:
|
||||
wanted_statistics_metadata.add(stat_compensation)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_compensation,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := flow_to.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
ENERGY_PRICE_UNITS,
|
||||
ENERGY_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
flow_to.get("entity_energy_price") is not None
|
||||
or flow_to.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
flow_to["stat_energy_to"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
for power_stat in source.get("power", []):
|
||||
wanted_statistics_metadata.add(power_stat["stat_rate"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_power_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
power_stat["stat_rate"],
|
||||
POWER_USAGE_DEVICE_CLASSES,
|
||||
POWER_USAGE_UNITS,
|
||||
POWER_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _validate_gas_source(
|
||||
hass: HomeAssistant,
|
||||
source: data.GasSourceType,
|
||||
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
|
||||
wanted_statistics_metadata: set[str],
|
||||
source_result: ValidationIssues,
|
||||
validate_calls: list[functools.partial[None]],
|
||||
) -> None:
|
||||
"""Validate gas energy source."""
|
||||
wanted_statistics_metadata.add(source["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
source["stat_energy_from"],
|
||||
GAS_USAGE_DEVICE_CLASSES,
|
||||
GAS_USAGE_UNITS,
|
||||
GAS_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := source.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_cost,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
GAS_PRICE_UNITS,
|
||||
GAS_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
source.get("entity_energy_price") is not None
|
||||
or source.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
source["stat_energy_from"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _validate_water_source(
|
||||
hass: HomeAssistant,
|
||||
source: data.WaterSourceType,
|
||||
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]],
|
||||
wanted_statistics_metadata: set[str],
|
||||
source_result: ValidationIssues,
|
||||
validate_calls: list[functools.partial[None]],
|
||||
) -> None:
|
||||
"""Validate water energy source."""
|
||||
wanted_statistics_metadata.add(source["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
source["stat_energy_from"],
|
||||
WATER_USAGE_DEVICE_CLASSES,
|
||||
WATER_USAGE_UNITS,
|
||||
WATER_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := source.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_cost,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
WATER_PRICE_UNITS,
|
||||
WATER_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
source.get("entity_energy_price") is not None
|
||||
or source.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
source["stat_energy_from"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
|
||||
"""Validate the energy configuration."""
|
||||
manager: data.EnergyManager = await data.async_get_manager(hass)
|
||||
statistics_metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]] = {}
|
||||
validate_calls: list[functools.partial[None]] = []
|
||||
validate_calls = []
|
||||
wanted_statistics_metadata: set[str] = set()
|
||||
|
||||
result = EnergyPreferencesValidation()
|
||||
@@ -653,35 +327,215 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
|
||||
result.energy_sources.append(source_result)
|
||||
|
||||
if source["type"] == "grid":
|
||||
_validate_grid_source(
|
||||
hass,
|
||||
source,
|
||||
statistics_metadata,
|
||||
wanted_statistics_metadata,
|
||||
source_result,
|
||||
validate_calls,
|
||||
)
|
||||
flow: data.FlowFromGridSourceType | data.FlowToGridSourceType
|
||||
for flow in source["flow_from"]:
|
||||
wanted_statistics_metadata.add(flow["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
flow["stat_energy_from"],
|
||||
ENERGY_USAGE_DEVICE_CLASSES,
|
||||
ENERGY_USAGE_UNITS,
|
||||
ENERGY_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := flow.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_cost,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (
|
||||
entity_energy_price := flow.get("entity_energy_price")
|
||||
) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
ENERGY_PRICE_UNITS,
|
||||
ENERGY_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
flow.get("entity_energy_price") is not None
|
||||
or flow.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
flow["stat_energy_from"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
for flow in source["flow_to"]:
|
||||
wanted_statistics_metadata.add(flow["stat_energy_to"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
flow["stat_energy_to"],
|
||||
ENERGY_USAGE_DEVICE_CLASSES,
|
||||
ENERGY_USAGE_UNITS,
|
||||
ENERGY_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_compensation := flow.get("stat_compensation")) is not None:
|
||||
wanted_statistics_metadata.add(stat_compensation)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_compensation,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (
|
||||
entity_energy_price := flow.get("entity_energy_price")
|
||||
) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
ENERGY_PRICE_UNITS,
|
||||
ENERGY_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
flow.get("entity_energy_price") is not None
|
||||
or flow.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
flow["stat_energy_to"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
elif source["type"] == "gas":
|
||||
_validate_gas_source(
|
||||
hass,
|
||||
source,
|
||||
statistics_metadata,
|
||||
wanted_statistics_metadata,
|
||||
source_result,
|
||||
validate_calls,
|
||||
wanted_statistics_metadata.add(source["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
source["stat_energy_from"],
|
||||
GAS_USAGE_DEVICE_CLASSES,
|
||||
GAS_USAGE_UNITS,
|
||||
GAS_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := source.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_cost,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
GAS_PRICE_UNITS,
|
||||
GAS_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
source.get("entity_energy_price") is not None
|
||||
or source.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
source["stat_energy_from"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
elif source["type"] == "water":
|
||||
_validate_water_source(
|
||||
hass,
|
||||
source,
|
||||
statistics_metadata,
|
||||
wanted_statistics_metadata,
|
||||
source_result,
|
||||
validate_calls,
|
||||
wanted_statistics_metadata.add(source["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
source["stat_energy_from"],
|
||||
WATER_USAGE_DEVICE_CLASSES,
|
||||
WATER_USAGE_UNITS,
|
||||
WATER_UNIT_ERROR,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
if (stat_cost := source.get("stat_cost")) is not None:
|
||||
wanted_statistics_metadata.add(stat_cost)
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_cost_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
stat_cost,
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_price_entity,
|
||||
hass,
|
||||
entity_energy_price,
|
||||
source_result,
|
||||
WATER_PRICE_UNITS,
|
||||
WATER_PRICE_UNIT_ERROR,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
source.get("entity_energy_price") is not None
|
||||
or source.get("number_energy_price") is not None
|
||||
):
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_auto_generated_cost_entity,
|
||||
hass,
|
||||
source["stat_energy_from"],
|
||||
source_result,
|
||||
)
|
||||
)
|
||||
|
||||
elif source["type"] == "solar":
|
||||
wanted_statistics_metadata.add(source["stat_energy_from"])
|
||||
validate_calls.append(
|
||||
|
||||
@@ -3,7 +3,6 @@

from __future__ import annotations

import asyncio
from urllib.parse import quote

import voluptuous as vol

@@ -153,9 +152,7 @@ class HassFoscamCamera(FoscamEntity, Camera):
    async def stream_source(self) -> str | None:
        """Return the stream source."""
        if self._rtsp_port:
            _username = quote(self._username)
            _password = quote(self._password)
            return f"rtsp://{_username}:{_password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
            return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"

        return None
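The quoted variant of `stream_source` URL-encodes the camera credentials before interpolating them into the RTSP URL. A short sketch of why that matters, using made-up credentials and an example host:

```python
# Illustrative only; the credentials and address are invented.
from urllib.parse import quote

username, password = "cam@home", "p@ssword"
url = f"rtsp://{quote(username)}:{quote(password)}@192.0.2.10:554/videoMain"
# -> rtsp://cam%40home:p%40ssword@192.0.2.10:554/videoMain
# Without quoting, the extra "@" characters would corrupt the URL's authority part.
```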
@@ -116,28 +116,20 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
is_open_wdr = None
|
||||
is_open_hdr = None
|
||||
reserve3 = product_info.get("reserve4")
|
||||
model = product_info.get("model")
|
||||
model_int = int(model) if model is not None else 7002
|
||||
if model_int > 7001:
|
||||
reserve3_int = int(reserve3) if reserve3 is not None else 0
|
||||
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
|
||||
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
|
||||
if supports_wdr_adjustment_val:
|
||||
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
|
||||
mode = (
|
||||
is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
|
||||
)
|
||||
is_open_wdr = bool(int(mode))
|
||||
elif supports_hdr_adjustment_val:
|
||||
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
|
||||
mode = (
|
||||
is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
|
||||
)
|
||||
is_open_hdr = bool(int(mode))
|
||||
else:
|
||||
supports_wdr_adjustment_val = False
|
||||
supports_hdr_adjustment_val = False
|
||||
reserve3_int = int(reserve3) if reserve3 is not None else 0
|
||||
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
|
||||
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
|
||||
if supports_wdr_adjustment_val:
|
||||
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
|
||||
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
|
||||
is_open_wdr = bool(int(mode))
|
||||
elif supports_hdr_adjustment_val:
|
||||
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
|
||||
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
|
||||
is_open_hdr = bool(int(mode))
|
||||
|
||||
ret_sw, software_capabilities = self.session.getSWCapabilities()
|
||||
|
||||
supports_speak_volume_adjustment_val = (
|
||||
bool(int(software_capabilities.get("swCapabilities1")) & 32)
|
||||
if ret_sw == 0
|
||||
|
||||
@@ -481,13 +481,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
sidebar_title="climate",
|
||||
sidebar_default_visible=False,
|
||||
)
|
||||
async_register_built_in_panel(
|
||||
hass,
|
||||
"home",
|
||||
sidebar_icon="mdi:home",
|
||||
sidebar_title="home",
|
||||
sidebar_default_visible=False,
|
||||
)
|
||||
|
||||
async_register_built_in_panel(hass, "profile")
|
||||
|
||||
@@ -777,7 +770,9 @@ class ManifestJSONView(HomeAssistantView):
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "frontend/get_icons",
|
||||
vol.Required("category"): vol.In({"entity", "entity_component", "services"}),
|
||||
vol.Required("category"): vol.In(
|
||||
{"entity", "entity_component", "services", "triggers"}
|
||||
),
|
||||
vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/google",
  "iot_class": "cloud_polling",
  "loggers": ["googleapiclient"],
  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.1.0"]
  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.0.0"]
}
77
homeassistant/components/hive/alarm_control_panel.py
Normal file
77
homeassistant/components/hive/alarm_control_panel.py
Normal file
@@ -0,0 +1,77 @@
|
||||
"""Support for the Hive alarm."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntity,
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import HiveConfigEntry
|
||||
from .entity import HiveEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
HIVETOHA = {
|
||||
"home": AlarmControlPanelState.DISARMED,
|
||||
"asleep": AlarmControlPanelState.ARMED_NIGHT,
|
||||
"away": AlarmControlPanelState.ARMED_AWAY,
|
||||
"sos": AlarmControlPanelState.TRIGGERED,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HiveConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Hive thermostat based on a config entry."""
|
||||
|
||||
hive = entry.runtime_data
|
||||
if devices := hive.session.deviceList.get("alarm_control_panel"):
|
||||
async_add_entities(
|
||||
[HiveAlarmControlPanelEntity(hive, dev) for dev in devices], True
|
||||
)
|
||||
|
||||
|
||||
class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity):
|
||||
"""Representation of a Hive alarm."""
|
||||
|
||||
_attr_supported_features = (
|
||||
AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
| AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
| AlarmControlPanelEntityFeature.TRIGGER
|
||||
)
|
||||
_attr_code_arm_required = False
|
||||
|
||||
async def async_alarm_disarm(self, code: str | None = None) -> None:
|
||||
"""Send disarm command."""
|
||||
await self.hive.alarm.setMode(self.device, "home")
|
||||
|
||||
async def async_alarm_arm_night(self, code: str | None = None) -> None:
|
||||
"""Send arm night command."""
|
||||
await self.hive.alarm.setMode(self.device, "asleep")
|
||||
|
||||
async def async_alarm_arm_away(self, code: str | None = None) -> None:
|
||||
"""Send arm away command."""
|
||||
await self.hive.alarm.setMode(self.device, "away")
|
||||
|
||||
async def async_alarm_trigger(self, code: str | None = None) -> None:
|
||||
"""Send alarm trigger command."""
|
||||
await self.hive.alarm.setMode(self.device, "sos")
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update all Node data from Hive."""
|
||||
await self.hive.session.updateData(self.device)
|
||||
self.device = await self.hive.alarm.getAlarm(self.device)
|
||||
self._attr_available = self.device["deviceData"].get("online")
|
||||
if self._attr_available:
|
||||
if self.device["status"]["state"]:
|
||||
self._attr_alarm_state = AlarmControlPanelState.TRIGGERED
|
||||
else:
|
||||
self._attr_alarm_state = HIVETOHA[self.device["status"]["mode"]]
|
||||
@@ -11,6 +11,7 @@ CONFIG_ENTRY_VERSION = 1
|
||||
DEFAULT_NAME = "Hive"
|
||||
DOMAIN = "hive"
|
||||
PLATFORMS = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.LIGHT,
|
||||
@@ -19,6 +20,7 @@ PLATFORMS = [
|
||||
Platform.WATER_HEATER,
|
||||
]
|
||||
PLATFORM_LOOKUP = {
|
||||
Platform.ALARM_CONTROL_PANEL: "alarm_control_panel",
|
||||
Platform.BINARY_SENSOR: "binary_sensor",
|
||||
Platform.CLIMATE: "climate",
|
||||
Platform.LIGHT: "light",
|
||||
|
||||
@@ -9,5 +9,5 @@
  },
  "iot_class": "cloud_polling",
  "loggers": ["apyhiveapi"],
  "requirements": ["pyhive-integration==1.0.7"]
  "requirements": ["pyhive-integration==1.0.6"]
}
@@ -22,6 +22,6 @@
  "iot_class": "cloud_push",
  "loggers": ["aiohomeconnect"],
  "quality_scale": "platinum",
  "requirements": ["aiohomeconnect==0.23.1"],
  "requirements": ["aiohomeconnect==0.23.0"],
  "zeroconf": ["_homeconnect._tcp.local."]
}
@@ -412,8 +412,8 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity):
|
||||
"""Set the program value."""
|
||||
event = self.appliance.events.get(cast(EventKey, self.bsh_key))
|
||||
self._attr_current_option = (
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP.get(ProgramKey(event_value))
|
||||
if event and isinstance(event_value := event.value, str)
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP.get(cast(ProgramKey, event.value))
|
||||
if event
|
||||
else None
|
||||
)
|
||||
|
||||
|
||||
@@ -556,11 +556,8 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
|
||||
status = self.appliance.status[cast(StatusKey, self.bsh_key)].value
|
||||
self._update_native_value(status)
|
||||
|
||||
def _update_native_value(self, status: str | float | None) -> None:
|
||||
def _update_native_value(self, status: str | float) -> None:
|
||||
"""Set the value of the sensor based on the given value."""
|
||||
if status is None:
|
||||
self._attr_native_value = None
|
||||
return
|
||||
match self.device_class:
|
||||
case SensorDeviceClass.TIMESTAMP:
|
||||
self._attr_native_value = dt_util.utcnow() + timedelta(
|
||||
|
||||
@@ -1237,7 +1237,7 @@
      "message": "Error obtaining data from the API: {error}"
    },
    "oauth2_implementation_unavailable": {
      "message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
      "message": "OAuth2 implementation temporarily unavailable, will retry"
    },
    "pause_program": {
      "message": "Error pausing program: {error}"
@@ -76,18 +76,9 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
"""Mixin for Home Assistant Connect ZBT-2 firmware methods."""
|
||||
|
||||
context: ConfigFlowContext
|
||||
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
# Early ZBT-2 samples used RTS/DTR to trigger the bootloader, later ones use the
|
||||
# baudrate method. Since the two are mutually exclusive we just use both.
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
|
||||
APPLICATION_PROBE_METHODS = [
|
||||
(ApplicationType.GECKO_BOOTLOADER, 115200),
|
||||
(ApplicationType.EZSP, ZIGBEE_BAUDRATE),
|
||||
(ApplicationType.SPINEL, 460800),
|
||||
]
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -6,12 +6,6 @@
|
||||
"dependencies": ["hardware", "usb", "homeassistant_hardware"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_connect_zbt2",
|
||||
"integration_type": "hardware",
|
||||
"loggers": [
|
||||
"bellows",
|
||||
"universal_silabs_flasher",
|
||||
"zigpy.serial",
|
||||
"serial_asyncio_fast"
|
||||
],
|
||||
"quality_scale": "bronze",
|
||||
"usb": [
|
||||
{
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.components.homeassistant_hardware.update import (
|
||||
from homeassistant.components.homeassistant_hardware.util import (
|
||||
ApplicationType,
|
||||
FirmwareInfo,
|
||||
ResetTarget,
|
||||
)
|
||||
from homeassistant.components.update import UpdateDeviceClass
|
||||
from homeassistant.const import EntityCategory
|
||||
@@ -23,7 +24,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import HomeAssistantConnectZBT2ConfigEntry
|
||||
from .config_flow import ZBT2FirmwareMixin
|
||||
from .const import DOMAIN, FIRMWARE, FIRMWARE_VERSION, HARDWARE_NAME, SERIAL_NUMBER
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -134,8 +134,7 @@ async def async_setup_entry(
|
||||
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
|
||||
"""Connect ZBT-2 firmware update entity."""
|
||||
|
||||
BOOTLOADER_RESET_METHODS = ZBT2FirmwareMixin.BOOTLOADER_RESET_METHODS
|
||||
APPLICATION_PROBE_METHODS = ZBT2FirmwareMixin.APPLICATION_PROBE_METHODS
|
||||
bootloader_reset_methods = [ResetTarget.RTS_DTR]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
||||
@@ -81,7 +81,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
|
||||
ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
|
||||
BOOTLOADER_RESET_METHODS: list[ResetTarget] = [] # Default, subclasses may override
|
||||
APPLICATION_PROBE_METHODS: list[tuple[ApplicationType, int]] = []
|
||||
|
||||
_picked_firmware_type: PickedFirmwareType
|
||||
_zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED
|
||||
@@ -231,11 +230,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
# Installing new firmware is only truly required if the wrong type is
|
||||
# installed: upgrading to the latest release of the current firmware type
|
||||
# isn't strictly necessary for functionality.
|
||||
self._probed_firmware_info = await probe_silabs_firmware_info(
|
||||
self._device,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
)
|
||||
self._probed_firmware_info = await probe_silabs_firmware_info(self._device)
|
||||
|
||||
firmware_install_required = self._probed_firmware_info is None or (
|
||||
self._probed_firmware_info.firmware_type != expected_installed_firmware_type
|
||||
@@ -300,7 +295,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
fw_data=fw_data,
|
||||
expected_installed_firmware_type=expected_installed_firmware_type,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
progress_callback=lambda offset, total: self.async_update_progress(
|
||||
offset / total
|
||||
),
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
|
||||
"integration_type": "system",
|
||||
"requirements": [
|
||||
"universal-silabs-flasher==0.1.0",
|
||||
"universal-silabs-flasher==0.0.37",
|
||||
"ha-silabs-firmware-client==0.3.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -86,8 +86,7 @@ class BaseFirmwareUpdateEntity(
|
||||
|
||||
# Subclasses provide the mapping between firmware types and entity descriptions
|
||||
entity_description: FirmwareUpdateEntityDescription
|
||||
BOOTLOADER_RESET_METHODS: list[ResetTarget]
|
||||
APPLICATION_PROBE_METHODS: list[tuple[ApplicationType, int]]
|
||||
bootloader_reset_methods: list[ResetTarget] = []
|
||||
|
||||
_attr_supported_features = (
|
||||
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
|
||||
@@ -279,8 +278,7 @@ class BaseFirmwareUpdateEntity(
|
||||
device=self._current_device,
|
||||
fw_data=fw_data,
|
||||
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
bootloader_reset_methods=self.bootloader_reset_methods,
|
||||
progress_callback=self._update_progress,
|
||||
domain=self._config_entry.domain,
|
||||
)
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections import defaultdict
|
||||
from collections.abc import AsyncIterator, Callable, Sequence
|
||||
from collections.abc import AsyncIterator, Callable, Iterable, Sequence
|
||||
from contextlib import AsyncExitStack, asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
@@ -309,20 +309,15 @@ async def guess_firmware_info(hass: HomeAssistant, device_path: str) -> Firmware
|
||||
|
||||
|
||||
async def probe_silabs_firmware_info(
|
||||
device: str,
|
||||
*,
|
||||
bootloader_reset_methods: Sequence[ResetTarget],
|
||||
application_probe_methods: Sequence[tuple[ApplicationType, int]],
|
||||
device: str, *, probe_methods: Iterable[ApplicationType] | None = None
|
||||
) -> FirmwareInfo | None:
|
||||
"""Probe the running firmware on a SiLabs device."""
|
||||
flasher = Flasher(
|
||||
device=device,
|
||||
probe_methods=tuple(
|
||||
(m.as_flasher_application_type(), baudrate)
|
||||
for m, baudrate in application_probe_methods
|
||||
),
|
||||
bootloader_reset=tuple(
|
||||
m.as_flasher_reset_target() for m in bootloader_reset_methods
|
||||
**(
|
||||
{"probe_methods": [m.as_flasher_application_type() for m in probe_methods]}
|
||||
if probe_methods
|
||||
else {}
|
||||
),
|
||||
)
|
||||
|
||||
@@ -348,18 +343,11 @@ async def probe_silabs_firmware_info(
|
||||
|
||||
|
||||
async def probe_silabs_firmware_type(
|
||||
device: str,
|
||||
*,
|
||||
bootloader_reset_methods: Sequence[ResetTarget],
|
||||
application_probe_methods: Sequence[tuple[ApplicationType, int]],
|
||||
device: str, *, probe_methods: Iterable[ApplicationType] | None = None
|
||||
) -> ApplicationType | None:
|
||||
"""Probe the running firmware type on a SiLabs device."""
|
||||
|
||||
fw_info = await probe_silabs_firmware_info(
|
||||
device,
|
||||
bootloader_reset_methods=bootloader_reset_methods,
|
||||
application_probe_methods=application_probe_methods,
|
||||
)
|
||||
fw_info = await probe_silabs_firmware_info(device, probe_methods=probe_methods)
|
||||
if fw_info is None:
|
||||
return None
|
||||
|
||||
@@ -371,22 +359,12 @@ async def async_flash_silabs_firmware(
|
||||
device: str,
|
||||
fw_data: bytes,
|
||||
expected_installed_firmware_type: ApplicationType,
|
||||
bootloader_reset_methods: Sequence[ResetTarget],
|
||||
application_probe_methods: Sequence[tuple[ApplicationType, int]],
|
||||
bootloader_reset_methods: Sequence[ResetTarget] = (),
|
||||
progress_callback: Callable[[int, int], None] | None = None,
|
||||
*,
|
||||
domain: str = DOMAIN,
|
||||
) -> FirmwareInfo:
|
||||
"""Flash firmware to the SiLabs device."""
|
||||
if not any(
|
||||
method == expected_installed_firmware_type
|
||||
for method, _ in application_probe_methods
|
||||
):
|
||||
raise ValueError(
|
||||
f"Expected installed firmware type {expected_installed_firmware_type!r}"
|
||||
f" not in application probe methods {application_probe_methods!r}"
|
||||
)
|
||||
|
||||
async with async_firmware_update_context(hass, device, domain):
|
||||
firmware_info = await guess_firmware_info(hass, device)
|
||||
_LOGGER.debug("Identified firmware info: %s", firmware_info)
|
||||
@@ -395,9 +373,11 @@ async def async_flash_silabs_firmware(
|
||||
|
||||
flasher = Flasher(
|
||||
device=device,
|
||||
probe_methods=tuple(
|
||||
(m.as_flasher_application_type(), baudrate)
|
||||
for m, baudrate in application_probe_methods
|
||||
probe_methods=(
|
||||
ApplicationType.GECKO_BOOTLOADER.as_flasher_application_type(),
|
||||
ApplicationType.EZSP.as_flasher_application_type(),
|
||||
ApplicationType.SPINEL.as_flasher_application_type(),
|
||||
ApplicationType.CPC.as_flasher_application_type(),
|
||||
),
|
||||
bootloader_reset=tuple(
|
||||
m.as_flasher_reset_target() for m in bootloader_reset_methods
|
||||
@@ -421,13 +401,7 @@ async def async_flash_silabs_firmware(
|
||||
|
||||
probed_firmware_info = await probe_silabs_firmware_info(
|
||||
device,
|
||||
bootloader_reset_methods=bootloader_reset_methods,
|
||||
# Only probe for the expected installed firmware type
|
||||
application_probe_methods=[
|
||||
(method, baudrate)
|
||||
for method, baudrate in application_probe_methods
|
||||
if method == expected_installed_firmware_type
|
||||
],
|
||||
probe_methods=(expected_installed_firmware_type,),
|
||||
)
|
||||
|
||||
if probed_firmware_info is None:
|
||||
|
||||
@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.helpers import (
|
||||
from homeassistant.components.homeassistant_hardware.util import (
|
||||
ApplicationType,
|
||||
FirmwareInfo,
|
||||
ResetTarget,
|
||||
)
|
||||
from homeassistant.components.usb import (
|
||||
usb_service_info_from_device,
|
||||
@@ -80,20 +79,6 @@ class SkyConnectFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
|
||||
context: ConfigFlowContext
|
||||
|
||||
ZIGBEE_BAUDRATE = 115200
|
||||
# There is no hardware bootloader trigger
|
||||
BOOTLOADER_RESET_METHODS: list[ResetTarget] = []
|
||||
APPLICATION_PROBE_METHODS = [
|
||||
(ApplicationType.GECKO_BOOTLOADER, 115200),
|
||||
(ApplicationType.EZSP, ZIGBEE_BAUDRATE),
|
||||
(ApplicationType.SPINEL, 460800),
|
||||
# CPC baudrates can be removed once multiprotocol is removed
|
||||
(ApplicationType.CPC, 115200),
|
||||
(ApplicationType.CPC, 230400),
|
||||
(ApplicationType.CPC, 460800),
|
||||
(ApplicationType.ROUTER, 115200),
|
||||
]
|
||||
|
||||
def _get_translation_placeholders(self) -> dict[str, str]:
|
||||
"""Shared translation placeholders."""
|
||||
placeholders = {
|
||||
|
||||
@@ -6,12 +6,6 @@
|
||||
"dependencies": ["hardware", "usb", "homeassistant_hardware"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_sky_connect",
|
||||
"integration_type": "hardware",
|
||||
"loggers": [
|
||||
"bellows",
|
||||
"universal_silabs_flasher",
|
||||
"zigpy.serial",
|
||||
"serial_asyncio_fast"
|
||||
],
|
||||
"usb": [
|
||||
{
|
||||
"description": "*skyconnect v1.0*",
|
||||
|
||||
@@ -23,7 +23,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import HomeAssistantSkyConnectConfigEntry
|
||||
from .config_flow import SkyConnectFirmwareMixin
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
FIRMWARE,
|
||||
@@ -152,8 +151,8 @@ async def async_setup_entry(
|
||||
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
|
||||
"""SkyConnect firmware update entity."""
|
||||
|
||||
BOOTLOADER_RESET_METHODS = SkyConnectFirmwareMixin.BOOTLOADER_RESET_METHODS
|
||||
APPLICATION_PROBE_METHODS = SkyConnectFirmwareMixin.APPLICATION_PROBE_METHODS
|
||||
# The ZBT-1 does not have a hardware bootloader trigger
|
||||
bootloader_reset_methods = []
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
||||
@@ -82,18 +82,7 @@ else:
|
||||
class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
"""Mixin for Home Assistant Yellow firmware methods."""
|
||||
|
||||
ZIGBEE_BAUDRATE = 115200
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.YELLOW]
|
||||
APPLICATION_PROBE_METHODS = [
|
||||
(ApplicationType.GECKO_BOOTLOADER, 115200),
|
||||
(ApplicationType.EZSP, ZIGBEE_BAUDRATE),
|
||||
(ApplicationType.SPINEL, 460800),
|
||||
# CPC baudrates can be removed once multiprotocol is removed
|
||||
(ApplicationType.CPC, 115200),
|
||||
(ApplicationType.CPC, 230400),
|
||||
(ApplicationType.CPC, 460800),
|
||||
(ApplicationType.ROUTER, 115200),
|
||||
]
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -157,11 +146,7 @@ class HomeAssistantYellowConfigFlow(
|
||||
assert self._device is not None
|
||||
|
||||
# We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this
|
||||
self._probed_firmware_info = await probe_silabs_firmware_info(
|
||||
self._device,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
)
|
||||
self._probed_firmware_info = await probe_silabs_firmware_info(self._device)
|
||||
|
||||
# Kick off ZHA hardware discovery automatically if Zigbee firmware is running
|
||||
if (
|
||||
|
||||
@@ -7,11 +7,5 @@
|
||||
"dependencies": ["hardware", "homeassistant_hardware"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/homeassistant_yellow",
|
||||
"integration_type": "hardware",
|
||||
"loggers": [
|
||||
"bellows",
|
||||
"universal_silabs_flasher",
|
||||
"zigpy.serial",
|
||||
"serial_asyncio_fast"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.components.homeassistant_hardware.update import (
|
||||
from homeassistant.components.homeassistant_hardware.util import (
|
||||
ApplicationType,
|
||||
FirmwareInfo,
|
||||
ResetTarget,
|
||||
)
|
||||
from homeassistant.components.update import UpdateDeviceClass
|
||||
from homeassistant.const import EntityCategory
|
||||
@@ -23,7 +24,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import HomeAssistantYellowConfigEntry
|
||||
from .config_flow import YellowFirmwareMixin
|
||||
from .const import DOMAIN, FIRMWARE, FIRMWARE_VERSION, MANUFACTURER, MODEL, RADIO_DEVICE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -150,8 +150,7 @@ async def async_setup_entry(
|
||||
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
|
||||
"""Yellow firmware update entity."""
|
||||
|
||||
BOOTLOADER_RESET_METHODS = YellowFirmwareMixin.BOOTLOADER_RESET_METHODS
|
||||
APPLICATION_PROBE_METHODS = YellowFirmwareMixin.APPLICATION_PROBE_METHODS
|
||||
bootloader_reset_methods = [ResetTarget.YELLOW] # Triggers a GPIO reset
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
||||
@@ -4,7 +4,6 @@ import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aiopvapi.resources.model import PowerviewData
|
||||
from aiopvapi.resources.shade_data import PowerviewShadeData
|
||||
from aiopvapi.rooms import Rooms
|
||||
from aiopvapi.scenes import Scenes
|
||||
from aiopvapi.shades import Shades
|
||||
@@ -17,6 +16,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
|
||||
from .coordinator import PowerviewShadeUpdateCoordinator
|
||||
from .model import PowerviewConfigEntry, PowerviewEntryData
|
||||
from .shade_data import PowerviewShadeData
|
||||
from .util import async_connect_hub
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
@@ -8,7 +8,6 @@ import logging
|
||||
|
||||
from aiopvapi.helpers.aiorequest import PvApiMaintenance
|
||||
from aiopvapi.hub import Hub
|
||||
from aiopvapi.resources.shade_data import PowerviewShadeData
|
||||
from aiopvapi.shades import Shades
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -16,6 +15,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import HUB_EXCEPTIONS
|
||||
from .shade_data import PowerviewShadeData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -208,13 +208,13 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity):
|
||||
async def _async_execute_move(self, move: ShadePosition) -> None:
|
||||
"""Execute a move that can affect multiple positions."""
|
||||
_LOGGER.debug("Move request %s: %s", self.name, move)
|
||||
# Store the requested positions so subsequent move
|
||||
# requests contain the secondary shade positions
|
||||
self.data.update_shade_position(self._shade.id, move)
|
||||
async with self.coordinator.radio_operation_lock:
|
||||
response = await self._shade.move(move)
|
||||
_LOGGER.debug("Move response %s: %s", self.name, response)
|
||||
|
||||
# Process the response from the hub (including new positions)
|
||||
self.data.update_shade_position(self._shade.id, response)
|
||||
|
||||
async def _async_set_cover_position(self, target_hass_position: int) -> None:
|
||||
"""Move the shade to a position."""
|
||||
target_hass_position = self._clamp_cover_limit(target_hass_position)
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import logging
|
||||
|
||||
from aiopvapi.resources.shade import BaseShade, ShadePosition
|
||||
from aiopvapi.resources.shade_data import PowerviewShadeData
|
||||
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -12,6 +11,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import PowerviewShadeUpdateCoordinator
|
||||
from .model import PowerviewDeviceInfo
|
||||
from .shade_data import PowerviewShadeData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -18,6 +18,6 @@
|
||||
},
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiopvapi"],
|
||||
"requirements": ["aiopvapi==3.3.0"],
|
||||
"requirements": ["aiopvapi==3.2.1"],
|
||||
"zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
"""Shade data for the Hunter Douglas PowerView integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import fields
|
||||
from typing import Any
|
||||
|
||||
from aiopvapi.resources.model import PowerviewData
|
||||
from aiopvapi.resources.shade import BaseShade, ShadePosition
|
||||
|
||||
from .util import async_map_data_by_id
|
||||
|
||||
POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"]
|
||||
|
||||
|
||||
def copy_position_data(source: ShadePosition, target: ShadePosition) -> ShadePosition:
|
||||
"""Copy position data from source to target for None values only."""
|
||||
for field in POSITION_FIELDS:
|
||||
if (value := getattr(source, field.name)) is not None:
|
||||
setattr(target, field.name, value)
|
||||
|
||||
|
||||
class PowerviewShadeData:
|
||||
"""Coordinate shade data between multiple api calls."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Init the shade data."""
|
||||
self._raw_data_by_id: dict[int, dict[str | int, Any]] = {}
|
||||
self._shade_group_data_by_id: dict[int, BaseShade] = {}
|
||||
self.positions: dict[int, ShadePosition] = {}
|
||||
|
||||
def get_raw_data(self, shade_id: int) -> dict[str | int, Any]:
|
||||
"""Get data for the shade."""
|
||||
return self._raw_data_by_id[shade_id]
|
||||
|
||||
def get_all_raw_data(self) -> dict[int, dict[str | int, Any]]:
|
||||
"""Get data for all shades."""
|
||||
return self._raw_data_by_id
|
||||
|
||||
def get_shade(self, shade_id: int) -> BaseShade:
|
||||
"""Get specific shade from the coordinator."""
|
||||
return self._shade_group_data_by_id[shade_id]
|
||||
|
||||
def get_shade_position(self, shade_id: int) -> ShadePosition:
|
||||
"""Get positions for a shade."""
|
||||
if shade_id not in self.positions:
|
||||
shade_position = ShadePosition()
|
||||
# If we have the group data, use it to populate the initial position
|
||||
if shade := self._shade_group_data_by_id.get(shade_id):
|
||||
copy_position_data(shade.current_position, shade_position)
|
||||
self.positions[shade_id] = shade_position
|
||||
return self.positions[shade_id]
|
||||
|
||||
def update_from_group_data(self, shade_id: int) -> None:
|
||||
"""Process an update from the group data."""
|
||||
data = self._shade_group_data_by_id[shade_id]
|
||||
copy_position_data(data.current_position, self.get_shade_position(data.id))
|
||||
|
||||
def store_group_data(self, shade_data: PowerviewData) -> None:
|
||||
"""Store data from the all shades endpoint.
|
||||
|
||||
This does not update the shades or positions (self.positions)
|
||||
as the data may be stale. update_from_group_data
|
||||
with a shade_id will update a specific shade
|
||||
from the group data.
|
||||
"""
|
||||
self._shade_group_data_by_id = shade_data.processed
|
||||
self._raw_data_by_id = async_map_data_by_id(shade_data.raw)
|
||||
|
||||
def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None:
|
||||
"""Update a single shades position."""
|
||||
copy_position_data(new_position, self.get_shade_position(shade_id))
|
||||
|
||||
def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None:
|
||||
"""Update a single shades velocity."""
|
||||
# the hub will always return a velocity of 0 on initial connect,
|
||||
# separate definition to store consistent value in HA
|
||||
# this value is purely driven from HA
|
||||
if shade_data.velocity is not None:
|
||||
self.get_shade_position(shade_id).velocity = shade_data.velocity
|
||||
@@ -2,15 +2,25 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable
|
||||
from typing import Any
|
||||
|
||||
from aiopvapi.helpers.aiorequest import AioRequest
|
||||
from aiopvapi.helpers.constants import ATTR_ID
|
||||
from aiopvapi.hub import Hub
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .model import PowerviewAPI, PowerviewDeviceInfo
|
||||
|
||||
|
||||
@callback
|
||||
def async_map_data_by_id(data: Iterable[dict[str | int, Any]]):
|
||||
"""Return a dict with the key being the id for a list of entries."""
|
||||
return {entry[ATTR_ID]: entry for entry in data}
|
||||
|
||||
|
||||
async def async_connect_hub(
|
||||
hass: HomeAssistant, address: str, api_version: int | None = None
|
||||
) -> PowerviewAPI:
|
||||
|
||||
@@ -13,7 +13,6 @@ from typing import Any
|
||||
from aiohttp import web
|
||||
from hyperion import client
|
||||
from hyperion.const import (
|
||||
KEY_DATA,
|
||||
KEY_IMAGE,
|
||||
KEY_IMAGE_STREAM,
|
||||
KEY_LEDCOLORS,
|
||||
@@ -156,8 +155,7 @@ class HyperionCamera(Camera):
|
||||
"""Update Hyperion components."""
|
||||
if not img:
|
||||
return
|
||||
# Prefer KEY_DATA (Hyperion server >= 2.1.1); fall back to KEY_RESULT for older server versions
|
||||
img_data = img.get(KEY_DATA, img.get(KEY_RESULT, {})).get(KEY_IMAGE)
|
||||
img_data = img.get(KEY_RESULT, {}).get(KEY_IMAGE)
|
||||
if not img_data or not img_data.startswith(IMAGE_STREAM_JPG_SENTINEL):
|
||||
return
|
||||
async with self._image_cond:
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from propcache.api import cached_property
|
||||
from pyituran import Vehicle
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
@@ -68,7 +69,7 @@ class IturanBinarySensor(IturanBaseEntity, BinarySensorEntity):
|
||||
super().__init__(coordinator, license_plate, description.key)
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
@cached_property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.entity_description.value_fn(self.vehicle)
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from propcache.api import cached_property
|
||||
|
||||
from homeassistant.components.device_tracker import TrackerEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -38,12 +40,12 @@ class IturanDeviceTracker(IturanBaseEntity, TrackerEntity):
|
||||
"""Initialize the device tracker."""
|
||||
super().__init__(coordinator, license_plate, "device_tracker")
|
||||
|
||||
@property
|
||||
@cached_property
|
||||
def latitude(self) -> float | None:
|
||||
"""Return latitude value of the device."""
|
||||
return self.vehicle.gps_coordinates[0]
|
||||
|
||||
@property
|
||||
@cached_property
|
||||
def longitude(self) -> float | None:
|
||||
"""Return longitude value of the device."""
|
||||
return self.vehicle.gps_coordinates[1]
|
||||
|
||||
@@ -6,6 +6,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from propcache.api import cached_property
|
||||
from pyituran import Vehicle
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -132,7 +133,7 @@ class IturanSensor(IturanBaseEntity, SensorEntity):
|
||||
super().__init__(coordinator, license_plate, description.key)
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
@cached_property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the state of the device."""
|
||||
return self.entity_description.value_fn(self.vehicle)
|
||||
|
||||
@@ -37,5 +37,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylamarzocco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pylamarzocco==2.1.3"]
|
||||
"requirements": ["pylamarzocco==2.1.2"]
|
||||
}
|
||||
|
||||
@@ -125,7 +125,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
|
||||
await self.coordinator.device.update_firmware()
|
||||
while (
|
||||
update_progress := await self.coordinator.device.get_firmware()
|
||||
).command_status is not UpdateStatus.UPDATED:
|
||||
).command_status is UpdateStatus.IN_PROGRESS:
|
||||
if counter >= MAX_UPDATE_WAIT:
|
||||
_raise_timeout_error()
|
||||
self._attr_update_percentage = update_progress.progress_percentage
|
||||
|
||||
@@ -94,6 +94,28 @@
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"address_to_device_id": {
|
||||
"description": "Converts an LCN address into a device ID.",
|
||||
"fields": {
|
||||
"host": {
|
||||
"description": "Host name as given in the integration panel.",
|
||||
"name": "Host name"
|
||||
},
|
||||
"id": {
|
||||
"description": "Module or group number of the target.",
|
||||
"name": "Module or group ID"
|
||||
},
|
||||
"segment_id": {
|
||||
"description": "Segment number of the target.",
|
||||
"name": "Segment ID"
|
||||
},
|
||||
"type": {
|
||||
"description": "Module type of the target.",
|
||||
"name": "Type"
|
||||
}
|
||||
},
|
||||
"name": "Address to device ID"
|
||||
},
|
||||
"dyn_text": {
|
||||
"description": "Sends dynamic text to LCN-GTxD displays.",
|
||||
"fields": {
|
||||
|
||||
131
homeassistant/components/light/condition.py
Normal file
131
homeassistant/components/light/condition.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""Provides conditions for lights."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import TYPE_CHECKING, Any, Final, override
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS, CONF_TARGET, STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant, split_entity_id
|
||||
from homeassistant.helpers import config_validation as cv, target
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
trace_condition_function,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
ATTR_BEHAVIOR: Final = "behavior"
|
||||
BEHAVIOR_ANY: Final = "any"
|
||||
BEHAVIOR_ALL: Final = "all"
|
||||
|
||||
|
||||
STATE_CONDITION_VALID_STATES: Final = [STATE_ON, STATE_OFF]
|
||||
STATE_CONDITION_OPTIONS_SCHEMA: dict[vol.Marker, Any] = {
|
||||
vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
|
||||
[BEHAVIOR_ANY, BEHAVIOR_ALL]
|
||||
),
|
||||
}
|
||||
STATE_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
|
||||
vol.Required(CONF_OPTIONS): STATE_CONDITION_OPTIONS_SCHEMA,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class StateConditionBase(Condition):
|
||||
"""State condition."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return STATE_CONDITION_SCHEMA(config) # type: ignore[no-any-return]
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config: ConditionConfig, state: str
|
||||
) -> None:
|
||||
"""Initialize condition."""
|
||||
self._hass = hass
|
||||
if TYPE_CHECKING:
|
||||
assert config.target
|
||||
assert config.options
|
||||
self._target = config.target
|
||||
self._behavior = config.options[ATTR_BEHAVIOR]
|
||||
self._state = state
|
||||
|
||||
@override
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
"""Get the condition checker."""
|
||||
|
||||
def check_any_match_state(states: list[str]) -> bool:
|
||||
"""Test if any entity match the state."""
|
||||
return any(state == self._state for state in states)
|
||||
|
||||
def check_all_match_state(states: list[str]) -> bool:
|
||||
"""Test if all entities match the state."""
|
||||
return all(state == self._state for state in states)
|
||||
|
||||
matcher: Callable[[list[str]], bool]
|
||||
if self._behavior == BEHAVIOR_ANY:
|
||||
matcher = check_any_match_state
|
||||
elif self._behavior == BEHAVIOR_ALL:
|
||||
matcher = check_all_match_state
|
||||
|
||||
@trace_condition_function
|
||||
def test_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
|
||||
"""Test state condition."""
|
||||
selector_data = target.TargetSelectorData(self._target)
|
||||
targeted_entities = target.async_extract_referenced_entity_ids(
|
||||
hass, selector_data, expand_group=False
|
||||
)
|
||||
referenced_entity_ids = targeted_entities.referenced.union(
|
||||
targeted_entities.indirectly_referenced
|
||||
)
|
||||
light_entity_ids = {
|
||||
entity_id
|
||||
for entity_id in referenced_entity_ids
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
light_entity_states = [
|
||||
state.state
|
||||
for entity_id in light_entity_ids
|
||||
if (state := hass.states.get(entity_id))
|
||||
and state.state in STATE_CONDITION_VALID_STATES
|
||||
]
|
||||
return matcher(light_entity_states)
|
||||
|
||||
return test_state
|
||||
|
||||
|
||||
class IsOnCondition(StateConditionBase):
|
||||
"""Is on condition."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
super().__init__(hass, config, STATE_ON)
|
||||
|
||||
|
||||
class IsOffCondition(StateConditionBase):
|
||||
"""Is off condition."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
super().__init__(hass, config, STATE_OFF)
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_off": IsOffCondition,
|
||||
"is_on": IsOnCondition,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the light conditions."""
|
||||
return CONDITIONS
|
||||
28
homeassistant/components/light/conditions.yaml
Normal file
28
homeassistant/components/light/conditions.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
is_off:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
is_on:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
@@ -1,4 +1,12 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"condition": "mdi:lightbulb-off"
|
||||
},
|
||||
"is_on": {
|
||||
"condition": "mdi:lightbulb-on"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:lightbulb",
|
||||
@@ -25,5 +33,13 @@
|
||||
"turn_on": {
|
||||
"service": "mdi:lightbulb-on"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"trigger": "mdi:lightbulb-off"
|
||||
},
|
||||
"turned_on": {
|
||||
"trigger": "mdi:lightbulb-on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,6 +36,30 @@
|
||||
"field_xy_color_name": "XY-color",
|
||||
"section_advanced_fields_name": "Advanced options"
|
||||
},
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"description": "Test if a light is off.",
|
||||
"description_configured": "[%key:component::light::conditions::is_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "How the state should match on the targeted lights.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "If a light is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Test if a light is on.",
|
||||
"description_configured": "[%key:component::light::conditions::is_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "How the state should match on the targeted lights.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "If a light is on"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"brightness_decrease": "Decrease {entity_name} brightness",
|
||||
@@ -284,11 +308,30 @@
|
||||
"yellowgreen": "Yellow green"
|
||||
}
|
||||
},
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"flash": {
|
||||
"options": {
|
||||
"long": "Long",
|
||||
"short": "Short"
|
||||
}
|
||||
},
|
||||
"state": {
|
||||
"options": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -462,5 +505,29 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Light"
|
||||
"title": "Light",
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"description": "Triggers when a light is turned off.",
|
||||
"description_configured": "[%key:component::light::triggers::turned_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "The behavior of the targeted lights to trigger on.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "When a light is turned off"
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers when a light is turned on.",
|
||||
"description_configured": "[%key:component::light::triggers::turned_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::light::triggers::turned_off::fields::behavior::description%]",
|
||||
"name": "[%key:component::light::triggers::turned_off::fields::behavior::name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a light is turned on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
17
homeassistant/components/light/trigger.py
Normal file
17
homeassistant/components/light/trigger.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Provides triggers for lights."""
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for lights."""
|
||||
return TRIGGERS
|
||||
31
homeassistant/components/light/triggers.yaml
Normal file
31
homeassistant/components/light/triggers.yaml
Normal file
@@ -0,0 +1,31 @@
|
||||
turned_on:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
turned_off:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["ical"],
|
||||
"requirements": ["ical==11.1.0"]
|
||||
"requirements": ["ical==11.0.0"]
|
||||
}
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/local_todo",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["ical==11.1.0"]
|
||||
"requirements": ["ical==11.0.0"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["lunatone-rest-api-client==0.5.7"]
|
||||
"requirements": ["lunatone-rest-api-client==0.5.3"]
|
||||
}
|
||||
|
||||
@@ -353,13 +353,17 @@ DISCOVERY_SCHEMAS = [
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
# DeviceFault or SupplyFault bit enabled
|
||||
device_to_ha=lambda x: bool(
|
||||
x
|
||||
& (
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault
|
||||
| clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault
|
||||
)
|
||||
),
|
||||
device_to_ha={
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault: True,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault: True,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedLow: False,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedHigh: False,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kLocalOverride: False,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning: False,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemotePressure: False,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteFlow: False,
|
||||
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteTemperature: False,
|
||||
}.get,
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(
|
||||
@@ -373,9 +377,9 @@ DISCOVERY_SCHEMAS = [
|
||||
key="PumpStatusRunning",
|
||||
translation_key="pump_running",
|
||||
device_class=BinarySensorDeviceClass.RUNNING,
|
||||
device_to_ha=lambda x: bool(
|
||||
device_to_ha=lambda x: (
|
||||
x
|
||||
& clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
|
||||
== clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
@@ -391,8 +395,8 @@ DISCOVERY_SCHEMAS = [
|
||||
translation_key="dishwasher_alarm_inflow",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_to_ha=lambda x: bool(
|
||||
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
|
||||
device_to_ha=lambda x: (
|
||||
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
@@ -406,8 +410,8 @@ DISCOVERY_SCHEMAS = [
|
||||
translation_key="alarm_door",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_to_ha=lambda x: bool(
|
||||
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
|
||||
device_to_ha=lambda x: (
|
||||
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
@@ -477,8 +481,8 @@ DISCOVERY_SCHEMAS = [
|
||||
translation_key="alarm_door",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_to_ha=lambda x: bool(
|
||||
x & clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
|
||||
device_to_ha=lambda x: (
|
||||
x == clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiomealie==1.1.0"]
|
||||
"requirements": ["aiomealie==1.0.1"]
|
||||
}
|
||||
|
||||
@@ -104,5 +104,10 @@
|
||||
"volume_up": {
|
||||
"service": "mdi:volume-plus"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"stopped_playing": {
|
||||
"trigger": "mdi:stop"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,6 +177,13 @@
|
||||
"off": "[%key:common::state::off%]",
|
||||
"one": "Repeat one"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -367,5 +374,18 @@
|
||||
"name": "Turn up volume"
|
||||
}
|
||||
},
|
||||
"title": "Media player"
|
||||
"title": "Media player",
|
||||
"triggers": {
|
||||
"stopped_playing": {
|
||||
"description": "Triggers when a media player stops playing.",
|
||||
"description_configured": "[%key:component::media_player::triggers::stopped_playing::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "The behavior of the targeted media players to trigger on.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "When a media player stops playing"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
41
homeassistant/components/media_player/trigger.py
Normal file
41
homeassistant/components/media_player/trigger.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Provides triggers for media players."""
|
||||
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.trigger import EntityTriggerBase, Trigger
|
||||
|
||||
from . import MediaPlayerState
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class MediaPlayerStoppedPlayingTrigger(EntityTriggerBase):
|
||||
"""Class for media player stopped playing trigger."""
|
||||
|
||||
_domain: str = DOMAIN
|
||||
_from_states = {
|
||||
MediaPlayerState.BUFFERING,
|
||||
MediaPlayerState.PAUSED,
|
||||
MediaPlayerState.PLAYING,
|
||||
}
|
||||
_to_states = {
|
||||
MediaPlayerState.IDLE,
|
||||
MediaPlayerState.OFF,
|
||||
MediaPlayerState.ON,
|
||||
}
|
||||
|
||||
def is_from_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the origin state."""
|
||||
return state.state in self._from_states
|
||||
|
||||
def is_to_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the target state."""
|
||||
return state.state in self._to_states
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"stopped_playing": MediaPlayerStoppedPlayingTrigger,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for media players."""
|
||||
return TRIGGERS
|
||||
15
homeassistant/components/media_player/triggers.yaml
Normal file
15
homeassistant/components/media_player/triggers.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
stopped_playing:
|
||||
target:
|
||||
entity:
|
||||
domain: media_player
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
@@ -1009,7 +1009,7 @@
|
||||
"cleaning_care_program": "Cleaning/care program",
|
||||
"maintenance_program": "Maintenance program",
|
||||
"normal_operation_mode": "Normal operation mode",
|
||||
"own_program": "Program"
|
||||
"own_program": "Own program"
|
||||
}
|
||||
},
|
||||
"remaining_time": {
|
||||
@@ -1089,7 +1089,7 @@
|
||||
"message": "Invalid device targeted."
|
||||
},
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
"message": "OAuth2 implementation unavailable, will retry"
|
||||
},
|
||||
"set_program_error": {
|
||||
"message": "'Set program' action failed: {status} / {message}"
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/mill",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["mill", "mill_local"],
|
||||
"requirements": ["millheater==0.14.1", "mill-local==0.3.0"]
|
||||
"requirements": ["millheater==0.14.0", "mill-local==0.3.0"]
|
||||
}
|
||||
|
||||
@@ -61,12 +61,10 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities([MobileAppBinarySensor(data, config_entry)])
|
||||
|
||||
config_entry.async_on_unload(
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
)
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -72,12 +72,10 @@ async def async_setup_entry(
|
||||
|
||||
async_add_entities([MobileAppSensor(data, config_entry)])
|
||||
|
||||
config_entry.async_on_unload(
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
)
|
||||
async_dispatcher_connect(
|
||||
hass,
|
||||
f"{DOMAIN}_{ENTITY_TYPE}_register",
|
||||
handle_sensor_registration,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ from music_assistant_client.exceptions import (
|
||||
from music_assistant_models.api import ServerInfoMessage
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_IGNORE, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
@@ -21,14 +21,21 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
DEFAULT_TITLE = "Music Assistant"
|
||||
DEFAULT_URL = "http://mass.local:8095"
|
||||
DEFAULT_TITLE = "Music Assistant"
|
||||
|
||||
|
||||
STEP_USER_SCHEMA = vol.Schema({vol.Required(CONF_URL): str})
|
||||
def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema:
|
||||
"""Return a schema for the manual step."""
|
||||
default_url = user_input.get(CONF_URL, DEFAULT_URL)
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_URL, default=default_url): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage:
|
||||
async def get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage:
|
||||
"""Validate the user input allows us to connect."""
|
||||
async with MusicAssistantClient(
|
||||
url, aiohttp_client.async_get_clientsession(hass)
|
||||
@@ -45,17 +52,25 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Set up flow instance."""
|
||||
self.url: str | None = None
|
||||
self.server_info: ServerInfoMessage | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a manual configuration."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
server_info = await _get_server_info(self.hass, user_input[CONF_URL])
|
||||
self.server_info = await get_server_info(
|
||||
self.hass, user_input[CONF_URL]
|
||||
)
|
||||
await self.async_set_unique_id(
|
||||
self.server_info.server_id, raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_URL: user_input[CONF_URL]},
|
||||
reload_on_update=True,
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidServerVersion:
|
||||
@@ -64,49 +79,68 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(
|
||||
server_info.server_id, raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_URL: user_input[CONF_URL]}
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data={CONF_URL: user_input[CONF_URL]},
|
||||
data={
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
},
|
||||
)
|
||||
|
||||
suggested_values = user_input
|
||||
if suggested_values is None:
|
||||
suggested_values = {CONF_URL: DEFAULT_URL}
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=get_manual_schema(user_input), errors=errors
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_USER_SCHEMA, suggested_values
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
return self.async_show_form(step_id="user", data_schema=get_manual_schema({}))
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a zeroconf discovery for a Music Assistant server."""
|
||||
"""Handle a discovered Mass server.
|
||||
|
||||
This flow is triggered by the Zeroconf component. It will check if the
|
||||
host is already configured and delegate to the import step if not.
|
||||
"""
|
||||
# abort if discovery info is not what we expect
|
||||
if "server_id" not in discovery_info.properties:
|
||||
return self.async_abort(reason="missing_server_id")
|
||||
|
||||
self.server_info = ServerInfoMessage.from_dict(discovery_info.properties)
|
||||
await self.async_set_unique_id(self.server_info.server_id)
|
||||
|
||||
# Check if we already have a config entry for this server_id
|
||||
existing_entry = self.hass.config_entries.async_entry_for_domain_unique_id(
|
||||
DOMAIN, self.server_info.server_id
|
||||
)
|
||||
|
||||
if existing_entry:
|
||||
# If the entry was ignored or disabled, don't make any changes
|
||||
if existing_entry.source == SOURCE_IGNORE or existing_entry.disabled_by:
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
# Test connectivity to the current URL first
|
||||
current_url = existing_entry.data[CONF_URL]
|
||||
try:
|
||||
await get_server_info(self.hass, current_url)
|
||||
# Current URL is working, no need to update
|
||||
return self.async_abort(reason="already_configured")
|
||||
except CannotConnect:
|
||||
# Current URL is not working, update to the discovered URL
|
||||
# and continue to discovery confirm
|
||||
self.hass.config_entries.async_update_entry(
|
||||
existing_entry,
|
||||
data={**existing_entry.data, CONF_URL: self.server_info.base_url},
|
||||
)
|
||||
# Schedule reload since URL changed
|
||||
self.hass.config_entries.async_schedule_reload(existing_entry.entry_id)
|
||||
else:
|
||||
# No existing entry, proceed with normal flow
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
# Test connectivity to the discovered URL
|
||||
try:
|
||||
server_info = ServerInfoMessage.from_dict(discovery_info.properties)
|
||||
except LookupError:
|
||||
return self.async_abort(reason="invalid_discovery_info")
|
||||
|
||||
self.url = server_info.base_url
|
||||
|
||||
await self.async_set_unique_id(server_info.server_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_URL: self.url})
|
||||
|
||||
try:
|
||||
await _get_server_info(self.hass, self.url)
|
||||
await get_server_info(self.hass, self.server_info.base_url)
|
||||
except CannotConnect:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
@@ -114,16 +148,16 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle user-confirmation of discovered server."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
|
||||
assert self.server_info is not None
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data={CONF_URL: self.url},
|
||||
data={
|
||||
CONF_URL: self.server_info.base_url,
|
||||
},
|
||||
)
|
||||
|
||||
self._set_confirm_only()
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
description_placeholders={"url": self.url},
|
||||
description_placeholders={"url": self.server_info.base_url},
|
||||
)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"domain": "music_assistant",
|
||||
"name": "Music Assistant",
|
||||
"after_dependencies": ["media_source", "media_player"],
|
||||
"codeowners": ["@music-assistant", "@arturpragacz"],
|
||||
"codeowners": ["@music-assistant"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/music_assistant",
|
||||
"iot_class": "local_push",
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
"message": "Error while loading the integration."
|
||||
},
|
||||
"implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
"message": "OAuth2 implementation is not available, will retry."
|
||||
},
|
||||
"incorrect_oauth2_scope": {
|
||||
"message": "Stored permissions are invalid. Please login again to update permissions."
|
||||
|
||||
@@ -20,11 +20,10 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
from homeassistant.helpers import (
aiohttp_client,
config_entry_oauth2_flow,
config_validation as cv,
)
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_send

@@ -74,19 +73,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Netatmo from a config entry."""
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="oauth2_implementation_unavailable",
) from err
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)

# Set unique id if non was set (migration)
if not entry.unique_id:
hass.config_entries.async_update_entry(entry, unique_id=DOMAIN)

session = OAuth2Session(hass, entry, implementation)
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
try:
await session.async_ensure_token_valid()
except aiohttp.ClientResponseError as ex:

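For orientation, here is a condensed, hedged sketch of the OAuth2 setup pattern this hunk toggles between: look up the config entry implementation, defer setup with ConfigEntryNotReady when it is not available yet, then validate the token. The 4xx handling at the end is an illustrative assumption, since the hunk is cut off before Netatmo's actual error handling.

```python
import aiohttp

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.config_entry_oauth2_flow import (
    ImplementationUnavailableError,
    OAuth2Session,
    async_get_config_entry_implementation,
)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Sketch of the OAuth2 setup pattern above (simplified, not Netatmo's real module)."""
    try:
        implementation = await async_get_config_entry_implementation(hass, entry)
    except ImplementationUnavailableError as err:
        # Application credentials are not loaded yet; ask Home Assistant to retry setup.
        raise ConfigEntryNotReady("OAuth2 implementation unavailable, will retry") from err

    session = OAuth2Session(hass, entry, implementation)
    try:
        # Refresh the access token if it has expired.
        await session.async_ensure_token_valid()
    except aiohttp.ClientResponseError as ex:
        # Assumption for illustration: a 4xx response means the stored token was
        # revoked, so trigger reauthentication; otherwise retry setup later.
        if 400 <= ex.status < 500:
            raise ConfigEntryAuthFailed from ex
        raise ConfigEntryNotReady from ex

    return True
```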
@@ -143,11 +143,6 @@
}
}
},
"exceptions": {
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
}
},
"options": {
"step": {
"public_weather": {

@@ -12,12 +12,7 @@ from homeassistant.helpers import entity_registry as er

from .const import _LOGGER

PLATFORMS: list[Platform] = [
Platform.CLIMATE,
Platform.COVER,
Platform.LIGHT,
Platform.SCENE,
]
PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE]

type NikoHomeControlConfigEntry = ConfigEntry[NHCController]


@@ -1,100 +0,0 @@
"""Support for Niko Home Control thermostats."""

from typing import Any

from nhc.const import THERMOSTAT_MODES, THERMOSTAT_MODES_REVERSE
from nhc.thermostat import NHCThermostat

from homeassistant.components.climate import (
PRESET_ECO,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
)
from homeassistant.components.sensor import UnitOfTemperature
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import NikoHomeControlConfigEntry
from .const import (
NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP,
NikoHomeControlThermostatModes,
)
from .entity import NikoHomeControlEntity


async def async_setup_entry(
hass: HomeAssistant,
entry: NikoHomeControlConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Niko Home Control thermostat entry."""
controller = entry.runtime_data

async_add_entities(
NikoHomeControlClimate(thermostat, controller, entry.entry_id)
for thermostat in controller.thermostats.values()
)


class NikoHomeControlClimate(NikoHomeControlEntity, ClimateEntity):
"""Representation of a Niko Home Control thermostat."""

_attr_supported_features: ClimateEntityFeature = (
ClimateEntityFeature.PRESET_MODE
| ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.TURN_OFF
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_name = None
_action: NHCThermostat

_attr_translation_key = "nhc_thermostat"

_attr_hvac_modes = [HVACMode.OFF, HVACMode.COOL, HVACMode.AUTO]

_attr_preset_modes = [
"day",
"night",
PRESET_ECO,
"prog1",
"prog2",
"prog3",
]

def _get_niko_mode(self, mode: str) -> int:
"""Return the Niko mode."""
return THERMOSTAT_MODES_REVERSE.get(mode, NikoHomeControlThermostatModes.OFF)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if ATTR_TEMPERATURE in kwargs:
await self._action.set_temperature(kwargs.get(ATTR_TEMPERATURE))

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self._action.set_mode(self._get_niko_mode(preset_mode))

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
await self._action.set_mode(NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP[hvac_mode])

async def async_turn_off(self) -> None:
"""Turn thermostat off."""
await self._action.set_mode(NikoHomeControlThermostatModes.OFF)

def update_state(self) -> None:
"""Update the state of the entity."""
if self._action.state == NikoHomeControlThermostatModes.OFF:
self._attr_hvac_mode = HVACMode.OFF
self._attr_preset_mode = None
elif self._action.state == NikoHomeControlThermostatModes.COOL:
self._attr_hvac_mode = HVACMode.COOL
self._attr_preset_mode = None
else:
self._attr_hvac_mode = HVACMode.AUTO
self._attr_preset_mode = THERMOSTAT_MODES[self._action.state]

self._attr_target_temperature = self._action.setpoint
self._attr_current_temperature = self._action.measured

@@ -1,23 +1,6 @@
"""Constants for niko_home_control integration."""

from enum import IntEnum
import logging

from homeassistant.components.climate import HVACMode

DOMAIN = "niko_home_control"
_LOGGER = logging.getLogger(__name__)

NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP = {
HVACMode.OFF: 3,
HVACMode.COOL: 4,
HVACMode.AUTO: 5,
}


class NikoHomeControlThermostatModes(IntEnum):
"""Enum for Niko Home Control thermostat modes."""

OFF = 3
COOL = 4
AUTO = 5

@@ -1,20 +0,0 @@
{
"entity": {
"climate": {
"nhc_thermostat": {
"state_attributes": {
"preset_mode": {
"default": "mdi:calendar-clock",
"state": {
"day": "mdi:weather-sunny",
"night": "mdi:weather-night",
"prog1": "mdi:numeric-1",
"prog2": "mdi:numeric-2",
"prog3": "mdi:numeric-3"
}
}
}
}
}
}
}

@@ -26,23 +26,5 @@
"description": "Set up your Niko Home Control instance."
}
}
},
"entity": {
"climate": {
"nhc_thermostat": {
"state_attributes": {
"preset_mode": {
"state": {
"day": "Day",
"eco": "Eco",
"night": "Night",
"prog1": "Program 1",
"prog2": "Program 2",
"prog3": "Program 3"
}
}
}
}
}
}
}

@@ -109,7 +109,7 @@
"message": "Cannot read {filename}, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`"
},
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
"message": "OAuth2 implementation unavailable, will retry"
},
"update_failed": {
"message": "Failed to update drive state"

@@ -256,7 +256,6 @@ class PlaystationNetworkFriendDataCoordinator(
account_id=self.user.account_id,
presence=self.user.get_presence(),
profile=self.profile,
trophy_summary=self.user.trophy_summary(),
)
except PSNAWPForbiddenError as error:
raise UpdateFailed(

@@ -54,7 +54,7 @@ class PlaystationNetworkSensor(StrEnum):
NOW_PLAYING = "now_playing"


SENSOR_DESCRIPTIONS: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
SENSOR_DESCRIPTIONS_TROPHY: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
PlaystationNetworkSensorEntityDescription(
key=PlaystationNetworkSensor.TROPHY_LEVEL,
translation_key=PlaystationNetworkSensor.TROPHY_LEVEL,

@@ -106,6 +106,8 @@ SENSOR_DESCRIPTIONS: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
else None
),
),
)
SENSOR_DESCRIPTIONS_USER: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
PlaystationNetworkSensorEntityDescription(
key=PlaystationNetworkSensor.ONLINE_ID,
translation_key=PlaystationNetworkSensor.ONLINE_ID,

@@ -150,7 +152,7 @@ async def async_setup_entry(
coordinator = config_entry.runtime_data.user_data
async_add_entities(
PlaystationNetworkSensorEntity(coordinator, description)
for description in SENSOR_DESCRIPTIONS
for description in SENSOR_DESCRIPTIONS_TROPHY + SENSOR_DESCRIPTIONS_USER
)

for (

@@ -164,7 +166,7 @@ async def async_setup_entry(
description,
config_entry.subentries[subentry_id],
)
for description in SENSOR_DESCRIPTIONS
for description in SENSOR_DESCRIPTIONS_USER
],
config_subentry_id=subentry_id,
)

@@ -57,14 +57,12 @@ type SelectType = Literal[
"select_gateway_mode",
"select_regulation_mode",
"select_schedule",
"select_zone_profile",
]
type SelectOptionsType = Literal[
"available_schedules",
"dhw_modes",
"gateway_modes",
"regulation_modes",
"zone_profiles",
"available_schedules",
]

# Default directives

@@ -84,10 +82,3 @@ MASTER_THERMOSTATS: Final[list[str]] = [
"zone_thermometer",
"zone_thermostat",
]

# Select constants
SELECT_DHW_MODE: Final = "select_dhw_mode"
SELECT_GATEWAY_MODE: Final = "select_gateway_mode"
SELECT_REGULATION_MODE: Final = "select_regulation_mode"
SELECT_SCHEDULE: Final = "select_schedule"
SELECT_ZONE_PROFILE: Final = "select_zone_profile"

@@ -8,6 +8,6 @@
"iot_class": "local_polling",
"loggers": ["plugwise"],
"quality_scale": "platinum",
"requirements": ["plugwise==1.10.0"],
"requirements": ["plugwise==1.9.0"],
"zeroconf": ["_plugwise._tcp.local."]
}

@@ -9,15 +9,7 @@ from homeassistant.const import STATE_ON, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import (
SELECT_DHW_MODE,
SELECT_GATEWAY_MODE,
SELECT_REGULATION_MODE,
SELECT_SCHEDULE,
SELECT_ZONE_PROFILE,
SelectOptionsType,
SelectType,
)
from .const import SelectOptionsType, SelectType
from .coordinator import PlugwiseConfigEntry, PlugwiseDataUpdateCoordinator
from .entity import PlugwiseEntity
from .util import plugwise_command

@@ -35,34 +27,28 @@ class PlugwiseSelectEntityDescription(SelectEntityDescription):

SELECT_TYPES = (
PlugwiseSelectEntityDescription(
key=SELECT_SCHEDULE,
translation_key=SELECT_SCHEDULE,
key="select_schedule",
translation_key="select_schedule",
options_key="available_schedules",
),
PlugwiseSelectEntityDescription(
key=SELECT_REGULATION_MODE,
translation_key=SELECT_REGULATION_MODE,
key="select_regulation_mode",
translation_key="regulation_mode",
entity_category=EntityCategory.CONFIG,
options_key="regulation_modes",
),
PlugwiseSelectEntityDescription(
key=SELECT_DHW_MODE,
translation_key=SELECT_DHW_MODE,
key="select_dhw_mode",
translation_key="dhw_mode",
entity_category=EntityCategory.CONFIG,
options_key="dhw_modes",
),
PlugwiseSelectEntityDescription(
key=SELECT_GATEWAY_MODE,
translation_key=SELECT_GATEWAY_MODE,
key="select_gateway_mode",
translation_key="gateway_mode",
entity_category=EntityCategory.CONFIG,
options_key="gateway_modes",
),
PlugwiseSelectEntityDescription(
key=SELECT_ZONE_PROFILE,
translation_key=SELECT_ZONE_PROFILE,
entity_category=EntityCategory.CONFIG,
options_key="zone_profiles",
),
)

@@ -109,7 +109,7 @@
}
},
"select": {
"select_dhw_mode": {
"dhw_mode": {
"name": "DHW mode",
"state": {
"auto": "[%key:common::state::auto%]",

@@ -118,7 +118,7 @@
"off": "[%key:common::state::off%]"
}
},
"select_gateway_mode": {
"gateway_mode": {
"name": "Gateway mode",
"state": {
"away": "Pause",

@@ -126,7 +126,7 @@
"vacation": "Vacation"
}
},
"select_regulation_mode": {
"regulation_mode": {
"name": "Regulation mode",
"state": {
"bleeding_cold": "Bleeding cold",

@@ -141,14 +141,6 @@
"state": {
"off": "[%key:common::state::off%]"
}
},
"select_zone_profile": {
"name": "Zone profile",
"state": {
"active": "[%key:common::state::active%]",
"off": "[%key:common::state::off%]",
"passive": "Passive"
}
}
},
"sensor": {

@@ -36,7 +36,7 @@
},
"exceptions": {
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
"message": "OAuth2 implementation unavailable, will retry"
}
}
}

@@ -12,7 +12,7 @@ from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.attributes import InstrumentedAttribute

from ..const import SupportedDialect
from ..db_schema import DOUBLE_PRECISION_TYPE_SQL, DOUBLE_TYPE, MYSQL_COLLATE
from ..db_schema import DOUBLE_PRECISION_TYPE_SQL, DOUBLE_TYPE
from ..util import session_scope

if TYPE_CHECKING:

@@ -105,13 +105,12 @@ def _validate_table_schema_has_correct_collation(
or dialect_kwargs.get("mariadb_collate")
or connection.dialect._fetch_setting(connection, "collation_server") # type: ignore[attr-defined] # noqa: SLF001
)
if collate and collate != MYSQL_COLLATE:
if collate and collate != "utf8mb4_unicode_ci":
_LOGGER.debug(
"Database %s collation is not %s",
"Database %s collation is not utf8mb4_unicode_ci",
table,
MYSQL_COLLATE,
)
schema_errors.add(f"{table}.{MYSQL_COLLATE}")
schema_errors.add(f"{table}.utf8mb4_unicode_ci")
return schema_errors

@@ -241,7 +240,7 @@ def correct_db_schema_utf8(
table_name = table_object.__tablename__
if (
f"{table_name}.4-byte UTF-8" in schema_errors
or f"{table_name}.{MYSQL_COLLATE}" in schema_errors
or f"{table_name}.utf8mb4_unicode_ci" in schema_errors
):
from ..migration import ( # noqa: PLC0415
_correct_table_character_set_and_collation,

@@ -26,9 +26,6 @@ def validate_db_schema(instance: Recorder) -> set[str]:
schema_errors |= validate_table_schema_supports_utf8(
instance, StatisticsMeta, (StatisticsMeta.statistic_id,)
)
schema_errors |= validate_table_schema_has_correct_collation(
instance, StatisticsMeta
)
for table in (Statistics, StatisticsShortTerm):
schema_errors |= validate_db_schema_precision(instance, table)
schema_errors |= validate_table_schema_has_correct_collation(instance, table)

@@ -54,7 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
STATES_META_SCHEMA_VERSION = 38
CIRCULAR_MEAN_SCHEMA_VERSION = 49
UNIT_CLASS_SCHEMA_VERSION = 52
UNIT_CLASS_SCHEMA_VERSION = 51

LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43

@@ -71,7 +71,7 @@ class LegacyBase(DeclarativeBase):
"""Base class for tables, used for schema migration."""


SCHEMA_VERSION = 53
SCHEMA_VERSION = 51

_LOGGER = logging.getLogger(__name__)

@@ -128,7 +128,7 @@ LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX = "ix_states_entity_id_last_update
LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36
CONTEXT_ID_BIN_MAX_LENGTH = 16

MYSQL_COLLATE = "utf8mb4_bin"
MYSQL_COLLATE = "utf8mb4_unicode_ci"
MYSQL_DEFAULT_CHARSET = "utf8mb4"
MYSQL_ENGINE = "InnoDB"

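The collation constants in this hunk are consumed as MySQL table arguments elsewhere in db_schema.py. A hedged sketch of that usage with a hypothetical table (ExampleTable is not part of the recorder schema; the kwargs simply show how SQLAlchemy passes mysql_* options through to CREATE TABLE):

```python
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import DeclarativeBase

# One side of the hunk above; the other side uses "utf8mb4_unicode_ci".
MYSQL_COLLATE = "utf8mb4_bin"
MYSQL_DEFAULT_CHARSET = "utf8mb4"
MYSQL_ENGINE = "InnoDB"


class Base(DeclarativeBase):
    """Declarative base for the sketch."""


class ExampleTable(Base):
    """Hypothetical table showing how the charset/collation constants are applied."""

    __tablename__ = "example_table"
    __table_args__ = {
        "mysql_default_charset": MYSQL_DEFAULT_CHARSET,
        "mysql_collate": MYSQL_COLLATE,
        "mysql_engine": MYSQL_ENGINE,
    }

    id = Column(Integer, primary_key=True)
    name = Column(String(255))
```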
@@ -13,15 +13,7 @@ from typing import TYPE_CHECKING, Any, TypedDict, cast, final
from uuid import UUID

import sqlalchemy
from sqlalchemy import (
ForeignKeyConstraint,
MetaData,
Table,
cast as cast_,
func,
text,
update,
)
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update
from sqlalchemy.engine import CursorResult, Engine
from sqlalchemy.exc import (
DatabaseError,

@@ -34,9 +26,8 @@ from sqlalchemy.exc import (
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.session import Session
from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint
from sqlalchemy.sql.expression import and_, true
from sqlalchemy.sql.expression import true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.types import BINARY

from homeassistant.core import HomeAssistant
from homeassistant.util.enum import try_parse_enum

@@ -1361,7 +1352,7 @@ class _SchemaVersion20Migrator(_SchemaVersionMigrator, target_version=20):
class _SchemaVersion21Migrator(_SchemaVersionMigrator, target_version=21):
def _apply_update(self) -> None:
"""Version specific update method."""
# Try to change the character set of events, states and statistics_meta tables
# Try to change the character set of the statistic_meta table
if self.engine.dialect.name == SupportedDialect.MYSQL:
for table in ("events", "states", "statistics_meta"):
_correct_table_character_set_and_collation(table, self.session_maker)

@@ -2053,94 +2044,17 @@ class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
class _SchemaVersion51Migrator(_SchemaVersionMigrator, target_version=51):
def _apply_update(self) -> None:
"""Version specific update method."""
# Replaced with version 52 which corrects issues with MySQL string comparisons.


class _SchemaVersion52Migrator(_SchemaVersionMigrator, target_version=52):
def _apply_update(self) -> None:
"""Version specific update method."""
if self.engine.dialect.name == SupportedDialect.MYSQL:
self._apply_update_mysql()
else:
self._apply_update_postgresql_sqlite()

def _apply_update_mysql(self) -> None:
"""Version specific update method for mysql."""
# Add unit class column to StatisticsMeta
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
with session_scope(session=self.session_maker()) as session:
connection = session.connection()
for conv in _PRIMARY_UNIT_CONVERTERS:
case_sensitive_units = {
u.encode("utf-8") if u else u for u in conv.VALID_UNITS
}
# Reset unit_class to None for entries that do not match
# the valid units (case sensitive) but matched before due to
# case insensitive comparisons.
connection.execute(
update(StatisticsMeta)
.where(
and_(
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
cast_(StatisticsMeta.unit_of_measurement, BINARY).not_in(
case_sensitive_units
),
)
)
.values(unit_class=None)
)
# Do an explicitly case sensitive match (actually binary) to set the
# correct unit_class. This is needed because we use the case
# insensitive utf8mb4_unicode_ci collation.
connection.execute(
update(StatisticsMeta)
.where(
and_(
cast_(StatisticsMeta.unit_of_measurement, BINARY).in_(
case_sensitive_units
),
StatisticsMeta.unit_class.is_(None),
)
)
.where(StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS))
.values(unit_class=conv.UNIT_CLASS)
)

def _apply_update_postgresql_sqlite(self) -> None:
"""Version specific update method for postgresql and sqlite."""
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
with session_scope(session=self.session_maker()) as session:
connection = session.connection()
for conv in _PRIMARY_UNIT_CONVERTERS:
# Set the correct unit_class. Unlike MySQL, Postgres and SQLite
# have case sensitive string comparisons by default, so we
# can directly match on the valid units.
connection.execute(
update(StatisticsMeta)
.where(
and_(
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
StatisticsMeta.unit_class.is_(None),
)
)
.values(unit_class=conv.UNIT_CLASS)
)


class _SchemaVersion53Migrator(_SchemaVersionMigrator, target_version=53):
def _apply_update(self) -> None:
"""Version specific update method."""
# Try to change the character set of events, states and statistics_meta tables
if self.engine.dialect.name == SupportedDialect.MYSQL:
for table in (
"events",
"event_data",
"states",
"state_attributes",
"statistics",
"statistics_meta",
"statistics_short_term",
):
_correct_table_character_set_and_collation(table, self.session_maker)


def _migrate_statistics_columns_to_timestamp_removing_duplicates(
hass: HomeAssistant,

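The binary-cast trick used by the MySQL branch of the migrator above is easier to see in isolation. A minimal sketch (not part of the diff; the lightweight table/column stand in for StatisticsMeta.unit_of_measurement) showing how SQLAlchemy renders such a case-sensitive comparison for MySQL:

```python
from sqlalchemy import BINARY, cast, column, select, table
from sqlalchemy.dialects import mysql

# Stand-in for the StatisticsMeta ORM model used in the migrator above.
statistics_meta = table("statistics_meta", column("id"), column("unit_of_measurement"))

# Under a case-insensitive collation such as utf8mb4_unicode_ci, a plain
# equality check on "Wh" also matches "wh"; casting to BINARY forces a
# byte-wise (case sensitive) comparison, which is what the migrator relies on.
stmt = select(statistics_meta.c.id).where(
    cast(statistics_meta.c.unit_of_measurement, BINARY) == "Wh".encode("utf-8")
)

print(stmt.compile(dialect=mysql.dialect()))
# Roughly: SELECT statistics_meta.id FROM statistics_meta
#          WHERE CAST(statistics_meta.unit_of_measurement AS BINARY) = %s
```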
@@ -2184,10 +2098,8 @@ def _correct_table_character_set_and_collation(
"""Correct issues detected by validate_db_schema."""
# Attempt to convert the table to utf8mb4
_LOGGER.warning(
"Updating table %s to character set %s and collation %s. %s",
"Updating character set and collation of table %s to utf8mb4. %s",
table,
MYSQL_DEFAULT_CHARSET,
MYSQL_COLLATE,
MIGRATION_NOTE_MINUTES,
)
with (

@@ -26,7 +26,7 @@ CACHE_SIZE = 8192

_LOGGER = logging.getLogger(__name__)

QUERY_STATISTICS_META = (
QUERY_STATISTIC_META = (
StatisticsMeta.id,
StatisticsMeta.statistic_id,
StatisticsMeta.source,

@@ -55,7 +55,7 @@ def _generate_get_metadata_stmt(

Depending on the schema version, either mean_type (added in version 49) or has_mean column is used.
"""
columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTICS_META)
columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META)
if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION:
columns.append(StatisticsMeta.mean_type)
else:

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["ical"],
"quality_scale": "silver",
"requirements": ["ical==11.1.0"]
"requirements": ["ical==11.0.0"]
}

Some files were not shown because too many files have changed in this diff.