mirror of https://github.com/home-assistant/core.git
synced 2025-09-24 04:19:33 +00:00

Compare commits: openai-mod ... fix-radio- (1 commit)

Author | SHA1 | Date
---|---|---
 | 35c8fefbd6 |
@@ -8,7 +8,6 @@
"PYTHONASYNCIODEBUG": "1"
},
"features": {
"ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
"ghcr.io/devcontainers/features/github-cli:1": {}
},
// Port 5683 udp is used by Shelly integration
6 .github/ISSUE_TEMPLATE/task.yml vendored
@@ -21,7 +21,7 @@ body:
- type: textarea
id: description
attributes:
label: Description
label: Task description
description: |
Provide a clear and detailed description of the task that needs to be accomplished.
@@ -43,11 +43,9 @@ body:
Include links to related issues, research, prototypes, roadmap opportunities etc.
placeholder: |
- Roadmap opportunity: [link]
- Epic: [link]
- Roadmap opportunity: [links]
- Feature request: [link]
- Technical design documents: [link]
- Prototype/mockup: [link]
- Dependencies: [links]
validations:
required: false
8 .github/copilot-instructions.md vendored
@@ -45,12 +45,6 @@ rules:
**When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.

## Code Review Guidelines

**When reviewing code, do NOT comment on:**
- **Missing imports** - We use static analysis tooling to catch that
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)

## Python Requirements

- **Compatibility**: Python 3.13+
@@ -1155,7 +1149,7 @@ _LOGGER.debug("Processing data: %s", data) # Use lazy logging
### Validation Commands
```bash
# Check specific integration
python -m script.hassfest --integration-path homeassistant/components/my_integration
python -m script.hassfest --integration my_integration

# Validate quality scale
# Check quality_scale.yaml against current rules
```
3 .github/dependabot.yml vendored
@@ -6,6 +6,3 @@ updates:
interval: daily
time: "06:00"
open-pull-requests-limit: 10
labels:
- dependency
- github_actions
2 .github/workflows/builder.yml vendored
@@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
uses: sigstore/cosign-installer@v3.9.1
with:
cosign-release: "v2.2.3"
4 .github/workflows/codeql.yml vendored
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.29.4
uses: github/codeql-action/init@v3.29.2
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.29.4
uses: github/codeql-action/analyze@v3.29.2
with:
category: "/language:python"
@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@v1.2.3
uses: actions/ai-inference@v1.1.0
with:
model: openai/gpt-4o
system-prompt: |

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@v1.2.3
uses: actions/ai-inference@v1.1.0
with:
model: openai/gpt-4o-mini
system-prompt: |
@@ -377,7 +377,6 @@ homeassistant.components.onedrive.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.open_router.*
homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
@@ -536,7 +535,6 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
12 CODEOWNERS generated
@@ -684,8 +684,8 @@ build.json @home-assistant/supervisor
/tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst @vincentwolsink
/tests/components/huum/ @frwickst @vincentwolsink
/homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
@@ -1102,8 +1102,6 @@ build.json @home-assistant/supervisor
/tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek
/tests/components/open_router/ @joostlek
/homeassistant/components/openai_conversation/ @balloob
/tests/components/openai_conversation/ @balloob
/homeassistant/components/openerz/ @misialq
@@ -1660,8 +1658,6 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco
@@ -1760,8 +1756,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy @arturpragacz
/tests/components/wiz/ @sbidy @arturpragacz
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
@@ -332,9 +332,6 @@ async def async_setup_hass(
if not is_virtual_env():
await async_mount_local_lib_path(runtime_config.config_dir)

if hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")

basic_setup_success = (
await async_from_config_dict(config_dict, hass) is not None
)
@@ -387,6 +384,8 @@ async def async_setup_hass(
{"recovery_mode": {}, "http": http_conf},
hass,
)
elif hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")

if runtime_config.open_ui:
hass.add_job(open_hass_ui, hass)
@@ -695,10 +694,10 @@ async def async_mount_local_lib_path(config_dir: str) -> str:

def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up."""
# The common config section [homeassistant] could be filtered here,
# but that is not necessary, since it corresponds to the core integration,
# that is always unconditionally loaded.
domains = {cv.domain_key(key) for key in config}
# Filter out the repeating and common config section [homeassistant]
domains = {
domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
}

# Add config entry and default domains
if not hass.config.recovery_mode:
@@ -726,28 +725,34 @@ async def _async_resolve_domains_and_preload(
together with all their dependencies.
"""
domains_to_setup = _get_domains(hass, config)

# Also process all base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to `domains`,
# so they can be setup first instead of discovering them later when a config
# entry setup task notices that it's needed and there is already a long line
# to use the import executor.
#
# Additionally process integrations that are defined under base platforms
# to speed things up.
# For example if we have
# sensor:
#   - platform: template
#
# `template` has to be loaded to validate the config for sensor.
# The more platforms under `sensor:`, the longer
# `template` has to be loaded to validate the config for sensor
# so we want to start loading `sensor` as soon as we know
# it will be needed. The more platforms under `sensor:`, the longer
# it will take to finish setup for `sensor` because each of these
# platforms has to be imported before we can validate the config.
#
# Thankfully we are migrating away from the platform pattern
# so this will be less of a problem in the future.
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
domains_to_setup.update(platform_integrations)

# Additionally process base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
# Also process integrations that are defined under base platforms
# to speed things up.
additional_domains_to_process = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),
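The comment block above explains why integrations referenced under base platforms (for example `sensor: - platform: template`) are merged into the set of domains to set up early. Below is a minimal, self-contained sketch of that idea; the `BASE_PLATFORMS` subset and the helper itself are illustrative assumptions, not Home Assistant's actual `conf_util.extract_platform_integrations`.

```python
# Hypothetical, simplified sketch of scanning a YAML config for platform
# integrations so their domains can be set up early (not the real helper).
BASE_PLATFORMS = {"sensor", "binary_sensor", "switch", "light"}  # illustrative subset


def extract_platform_integrations(config: dict) -> dict[str, set[str]]:
    """Map each base platform to the integrations configured under it."""
    result: dict[str, set[str]] = {}
    for key, entries in config.items():
        # Config keys may carry a suffix such as "sensor 2"; keep only the domain part.
        domain = key.split(" ")[0]
        if domain not in BASE_PLATFORMS or not isinstance(entries, list):
            continue
        for entry in entries:
            if isinstance(entry, dict) and (platform := entry.get("platform")):
                result.setdefault(domain, set()).add(platform)
    return result


# Example: {"sensor": [{"platform": "template"}]} -> {"sensor": {"template"}}
# The resulting platform names would then be merged into domains_to_setup.
```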
@@ -865,9 +870,9 @@ async def _async_set_up_integrations(
domains = set(integrations) & all_domains

_LOGGER.info(
"Domains to be set up: %s\nDependencies: %s",
domains or "{}",
(all_domains - domains) or "{}",
"Domains to be set up: %s | %s",
domains,
all_domains - domains,
)

async_set_domains_to_be_loaded(hass, all_domains)
@@ -908,13 +913,12 @@ async def _async_set_up_integrations(
stage_all_domains = stage_domains | stage_dep_domains

_LOGGER.info(
"Setting up stage %s: %s; already set up: %s\n"
"Dependencies: %s; already set up: %s",
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
name,
stage_domains,
(stage_domains_unfiltered - stage_domains) or "{}",
stage_dep_domains or "{}",
(stage_dep_domains_unfiltered - stage_dep_domains) or "{}",
stage_domains_unfiltered - stage_domains,
stage_dep_domains,
stage_dep_domains_unfiltered - stage_dep_domains,
)

if timeout is None:
@@ -20,7 +20,6 @@ from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType

from .const import (
ATTR_ATTACHMENTS,
ATTR_INSTRUCTIONS,
ATTR_REQUIRED,
ATTR_STRUCTURE,
@@ -93,9 +92,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
_validate_structure_fields,
),
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
}
),
supports_response=SupportsResponse.ONLY,

@@ -23,7 +23,6 @@ ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name"
ATTR_STRUCTURE: Final = "structure"
ATTR_REQUIRED: Final = "required"
ATTR_ATTACHMENTS: Final = "attachments"

DEFAULT_SYSTEM_PROMPT = (
"You are a Home Assistant expert and help users with their tasks."
@@ -35,6 +34,3 @@ class AITaskEntityFeature(IntFlag):

GENERATE_DATA = 1
"""Generate data based on instructions."""

SUPPORT_ATTACHMENTS = 2
"""Support attachments with generate data."""
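The `AITaskEntityFeature` members above are combined and tested as bit flags. A short standalone illustration of how such `IntFlag` feature checks behave (mirroring the membership test used later in `async_generate_data`):

```python
from enum import IntFlag


class AITaskEntityFeature(IntFlag):
    """Illustrative copy of the flags defined above."""

    GENERATE_DATA = 1
    SUPPORT_ATTACHMENTS = 2


supported = AITaskEntityFeature.GENERATE_DATA | AITaskEntityFeature.SUPPORT_ATTACHMENTS

# Membership tests operate on the combined value, which is how the integration
# checks `AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features`.
assert AITaskEntityFeature.GENERATE_DATA in supported
assert AITaskEntityFeature.SUPPORT_ATTACHMENTS in supported
assert AITaskEntityFeature.SUPPORT_ATTACHMENTS not in AITaskEntityFeature.GENERATE_DATA
```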
@@ -13,7 +13,7 @@ from homeassistant.components.conversation import (
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

@@ -56,12 +56,12 @@ class AITaskEntity(RestoreEntity):
@contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log(
self,
session: ChatSession,
task: GenDataTask,
) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup
with (
async_get_chat_session(self.hass) as session,
async_get_chat_log(
self.hass,
session,
@@ -79,22 +79,19 @@ class AITaskEntity(RestoreEntity):
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
)

chat_log.async_add_user_content(
UserContent(task.instructions, attachments=task.attachments)
)
chat_log.async_add_user_content(UserContent(task.instructions))

yield chat_log

@final
async def internal_async_generate_data(
self,
session: ChatSession,
task: GenDataTask,
) -> GenDataTaskResult:
"""Run a gen data task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
async with self._async_get_ai_task_chat_log(task) as chat_log:
return await self._async_generate_data(task, chat_log)

async def _async_generate_data(
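For readers unfamiliar with the pattern used in `_async_get_ai_task_chat_log`, here is a minimal, generic sketch of an async context manager that prepares a resource, yields it to the caller, and finalizes it afterwards. This is plain standard-library code, not Home Assistant code.

```python
import asyncio
import contextlib
from collections.abc import AsyncGenerator


@contextlib.asynccontextmanager
async def _get_log(name: str) -> AsyncGenerator[list[str], None]:
    """Set up a log, hand it to the caller, then finalize it."""
    log: list[str] = [f"session started for {name}"]
    try:
        yield log  # the body of the `async with` block runs here
    finally:
        log.append("session finished")


async def main() -> None:
    async with _get_log("demo") as log:
        log.append("user content added")
    print(log)


asyncio.run(main())
```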
@@ -1,9 +1,8 @@
{
"domain": "ai_task",
"name": "AI Task",
"after_dependencies": ["camera"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation", "media_source"],
"dependencies": ["conversation"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",
"integration_type": "system",
"quality_scale": "internal"
@@ -10,24 +10,16 @@ generate_data:
required: true
selector:
text:
multiline: true
entity_id:
required: false
selector:
entity:
filter:
domain: ai_task
supported_features:
- ai_task.AITaskEntityFeature.GENERATE_DATA
domain: ai_task
supported_features:
- ai_task.AITaskEntityFeature.GENERATE_DATA
structure:
advanced: true
required: false
example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }'
selector:
object:
attachments:
required: false
selector:
media:
accept:
- "*"
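As context for the `generate_data` service definition above, a hypothetical call from Python could look like the sketch below. The field names follow the services.yaml shown here; the entity ID, structure contents, and response handling are assumptions for illustration only.

```python
async def example(hass) -> None:
    """Hypothetical call of the ai_task.generate_data service (field names from services.yaml above)."""
    result = await hass.services.async_call(
        "ai_task",
        "generate_data",
        {
            "task_name": "receipt_summary",
            "instructions": "Summarize the attached receipt.",
            "entity_id": "ai_task.my_llm",  # assumed entity ID
            "structure": {
                "total": {"description": "Total amount", "selector": {"number": {}}},
            },
            "attachments": [
                {"media_content_id": "media-source://camera/camera.kitchen"}
            ],
        },
        blocking=True,
        return_response=True,  # the service only supports returning a response
    )
    # `result` would contain the generated data for the task.
```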
@@ -19,10 +19,6 @@
"structure": {
"name": "Structured output",
"description": "When set, the AI Task will output fields with this in structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field."
},
"attachments": {
"name": "Attachments",
"description": "List of files to attach for multi-modal AI analysis."
}
}
}
@@ -3,32 +3,16 @@
from __future__ import annotations

from dataclasses import dataclass
import mimetypes
from pathlib import Path
import tempfile
from typing import Any

import voluptuous as vol

from homeassistant.components import camera, conversation, media_source
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import async_get_chat_session

from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature


def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)


async def async_generate_data(
hass: HomeAssistant,
*,
@@ -36,7 +20,6 @@ async def async_generate_data(
entity_id: str | None = None,
instructions: str,
structure: vol.Schema | None = None,
attachments: list[dict] | None = None,
) -> GenDataTaskResult:
"""Run a task in the AI Task integration."""
if entity_id is None:
@@ -54,80 +37,13 @@ async def async_generate_data(
f"AI Task entity {entity_id} does not support generating data"
)

# Resolve attachments
resolved_attachments: list[conversation.Attachment] = []
created_files: list[Path] = []

if (
attachments
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
):
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
)

for attachment in attachments or []:
media_content_id = attachment["media_content_id"]

# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix("media-source://camera/")

# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)

temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image
)
created_files.append(temp_filename)

resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image.content_type,
path=temp_filename,
)
)
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)
if media.path is None:
raise HomeAssistantError(
"Only local attachments are currently supported"
)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=media.mime_type,
path=media.path,
)
)

with async_get_chat_session(hass) as session:
if created_files:

def cleanup_files() -> None:
"""Cleanup temporary files."""
for file in created_files:
file.unlink(missing_ok=True)

@callback
def cleanup_files_callback() -> None:
"""Cleanup temporary files."""
hass.async_add_executor_job(cleanup_files)

session.async_on_cleanup(cleanup_files_callback)

return await entity.internal_async_generate_data(
session,
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments or None,
),
return await entity.internal_async_generate_data(
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
)
)


@dataclass(slots=True)
@@ -143,9 +59,6 @@ class GenDataTask:
structure: vol.Schema | None = None
"""Optional structure for the data to be generated."""

attachments: list[conversation.Attachment] | None = None
"""List of attachments to go along the instructions."""

def __str__(self) -> str:
"""Return task as a string."""
return f"<GenDataTask {self.name}: {id(self)}>"
@@ -6,7 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["airgradient==0.9.2"],
"zeroconf": ["_airgradient._tcp.local."]
}
@@ -14,9 +14,9 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
docs-high-level-description: todo
|
||||
docs-installation-instructions: todo
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
@@ -34,7 +34,7 @@ rules:
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options to configure
|
||||
docs-installation-parameters: done
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
@@ -43,19 +43,23 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not require authentication.
|
||||
test-coverage: done
|
||||
test-coverage: todo
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
discovery-update-info:
|
||||
status: todo
|
||||
comment: DHCP is still possible
|
||||
discovery:
|
||||
status: todo
|
||||
comment: DHCP is still possible
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
|
@@ -45,6 +45,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bo
|
||||
# Store Entity and Initialize Platforms
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
# Listen for option changes
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
# Clean up unused device entries with no entities
|
||||
@@ -85,3 +88,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithReload,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -126,7 +126,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return AirNowOptionsFlowHandler()
|
||||
|
||||
|
||||
class AirNowOptionsFlowHandler(OptionsFlowWithReload):
|
||||
class AirNowOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle an options flow for AirNow."""
|
||||
|
||||
async def async_step_init(
|
||||
|
@@ -6,5 +6,6 @@ CONF_RETURN_AVERAGE: Final = "return_average"
|
||||
CONF_CLIP_NEGATIVE: Final = "clip_negatives"
|
||||
DOMAIN: Final = "airq"
|
||||
MANUFACTURER: Final = "CorantGmbH"
|
||||
CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
|
||||
ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
|
||||
UPDATE_INTERVAL: float = 10.0
|
||||
|
@@ -4,6 +4,9 @@
|
||||
"health_index": {
|
||||
"default": "mdi:heart-pulse"
|
||||
},
|
||||
"absolute_humidity": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
"oxygen": {
|
||||
"default": "mdi:leaf"
|
||||
},
|
||||
|
@@ -14,7 +14,6 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_GRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
@@ -29,7 +28,10 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import AirQConfigEntry, AirQCoordinator
|
||||
from .const import ACTIVITY_BECQUEREL_PER_CUBIC_METER
|
||||
from .const import (
|
||||
ACTIVITY_BECQUEREL_PER_CUBIC_METER,
|
||||
CONCENTRATION_GRAMS_PER_CUBIC_METER,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -193,7 +195,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="humidity_abs",
|
||||
device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
|
||||
translation_key="absolute_humidity",
|
||||
native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("humidity_abs"),
|
||||
|
@@ -93,6 +93,9 @@
|
||||
"health_index": {
|
||||
"name": "Health index"
|
||||
},
|
||||
"absolute_humidity": {
|
||||
"name": "Absolute humidity"
|
||||
},
|
||||
"hydrogen": {
|
||||
"name": "Hydrogen"
|
||||
},
|
||||
|
@@ -45,8 +45,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
errors = {}
|
||||
await self.async_set_unique_id(user_input[CONF_ID])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
try:
|
||||
await airthings.get_token(
|
||||
@@ -62,6 +60,9 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_ID])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(title="Airthings", data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
|
@@ -150,7 +150,7 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
entities = [
|
||||
AirthingsDeviceSensor(
|
||||
AirthingsHeaterEnergySensor(
|
||||
coordinator,
|
||||
airthings_device,
|
||||
SENSORS[sensor_types],
|
||||
@@ -162,7 +162,7 @@ async def async_setup_entry(
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class AirthingsDeviceSensor(
|
||||
class AirthingsHeaterEnergySensor(
|
||||
CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
|
||||
):
|
||||
"""Representation of a Airthings Sensor device."""
|
||||
|
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.7.0"]
"requirements": ["aioairzone-cloud==0.6.12"]
}
@@ -505,13 +505,8 @@ class ClimateCapabilities(AlexaEntity):
|
||||
):
|
||||
yield AlexaThermostatController(self.hass, self.entity)
|
||||
yield AlexaTemperatureSensor(self.hass, self.entity)
|
||||
if (
|
||||
self.entity.domain == water_heater.DOMAIN
|
||||
and (
|
||||
supported_features
|
||||
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE
|
||||
)
|
||||
and self.entity.attributes.get(water_heater.ATTR_OPERATION_LIST)
|
||||
if self.entity.domain == water_heater.DOMAIN and (
|
||||
supported_features & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
|
||||
):
|
||||
yield AlexaModeController(
|
||||
self.entity,
|
||||
@@ -639,9 +634,7 @@ class FanCapabilities(AlexaEntity):
|
||||
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}"
|
||||
)
|
||||
force_range_controller = False
|
||||
if supported & fan.FanEntityFeature.PRESET_MODE and self.entity.attributes.get(
|
||||
fan.ATTR_PRESET_MODES
|
||||
):
|
||||
if supported & fan.FanEntityFeature.PRESET_MODE:
|
||||
yield AlexaModeController(
|
||||
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}"
|
||||
)
|
||||
@@ -679,11 +672,7 @@ class RemoteCapabilities(AlexaEntity):
|
||||
yield AlexaPowerController(self.entity)
|
||||
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
|
||||
if (
|
||||
activities
|
||||
and (supported & remote.RemoteEntityFeature.ACTIVITY)
|
||||
and self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
|
||||
):
|
||||
if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
|
||||
yield AlexaModeController(
|
||||
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
|
||||
)
|
||||
@@ -703,9 +692,7 @@ class HumidifierCapabilities(AlexaEntity):
|
||||
"""Yield the supported interfaces."""
|
||||
yield AlexaPowerController(self.entity)
|
||||
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
if (
|
||||
supported & humidifier.HumidifierEntityFeature.MODES
|
||||
) and self.entity.attributes.get(humidifier.ATTR_AVAILABLE_MODES):
|
||||
if supported & humidifier.HumidifierEntityFeature.MODES:
|
||||
yield AlexaModeController(
|
||||
self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}"
|
||||
)
|
||||
|
@@ -6,12 +6,7 @@ from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from aioamazondevices.api import AmazonEchoApi
|
||||
from aioamazondevices.exceptions import (
|
||||
CannotAuthenticate,
|
||||
CannotConnect,
|
||||
CannotRetrieveData,
|
||||
WrongCountry,
|
||||
)
|
||||
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect, WrongCountry
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
@@ -62,8 +57,6 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "cannot_connect"
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
except CannotRetrieveData:
|
||||
errors["base"] = "cannot_retrieve_data"
|
||||
except WrongCountry:
|
||||
errors["base"] = "wrong_country"
|
||||
else:
|
||||
@@ -113,8 +106,6 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "cannot_connect"
|
||||
except CannotAuthenticate:
|
||||
errors["base"] = "invalid_auth"
|
||||
except CannotRetrieveData:
|
||||
errors["base"] = "cannot_retrieve_data"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
|
@@ -52,18 +52,8 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
try:
|
||||
await self.api.login_mode_stored_data()
|
||||
return await self.api.get_devices_data()
|
||||
except CannotConnect as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect_with_error",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except CannotRetrieveData as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_retrieve_data_with_error",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except (CannotConnect, CannotRetrieveData) as err:
|
||||
raise UpdateFailed(f"Error occurred while updating {self.name}") from err
|
||||
except CannotAuthenticate as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
|
@@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioamazondevices==3.5.1"]
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aioamazondevices==3.2.3"]
|
||||
}
|
||||
|
@@ -28,31 +28,33 @@ rules:
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: all tests missing
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Network information not relevant
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: There are a ton of mac address ranges in use, but also by kindles which are not supported by this integration
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
|
@@ -43,7 +43,6 @@
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"cannot_retrieve_data": "Unable to retrieve data from Amazon. Please try again later.",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
@@ -85,10 +84,10 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"cannot_connect_with_error": {
|
||||
"cannot_connect": {
|
||||
"message": "Error connecting: {error}"
|
||||
},
|
||||
"cannot_retrieve_data_with_error": {
|
||||
"cannot_retrieve_data": {
|
||||
"message": "Error retrieving data: {error}"
|
||||
}
|
||||
}
|
||||
|
@@ -26,14 +26,14 @@ def alexa_api_call[_T: AmazonEntity, **_P](
|
||||
self.coordinator.last_update_success = False
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect_with_error",
|
||||
translation_key="cannot_connect",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except CannotRetrieveData as err:
|
||||
self.coordinator.last_update_success = False
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_retrieve_data_with_error",
|
||||
translation_key="cannot_retrieve_data",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
|
||||
|
@@ -2,22 +2,11 @@
|
||||
|
||||
import amberelectric
|
||||
|
||||
from homeassistant.components.sensor import ConfigType
|
||||
from homeassistant.const import CONF_API_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
|
||||
from .const import CONF_SITE_ID, PLATFORMS
|
||||
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
|
||||
from .services import setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Amber component."""
|
||||
setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:
|
||||
|
@@ -1,24 +1,14 @@
|
||||
"""Amber Electric Constants."""
|
||||
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN: Final = "amberelectric"
|
||||
DOMAIN = "amberelectric"
|
||||
CONF_SITE_NAME = "site_name"
|
||||
CONF_SITE_ID = "site_id"
|
||||
|
||||
ATTR_CONFIG_ENTRY_ID = "config_entry_id"
|
||||
ATTR_CHANNEL_TYPE = "channel_type"
|
||||
|
||||
ATTRIBUTION = "Data provided by Amber Electric"
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
|
||||
SERVICE_GET_FORECASTS = "get_forecasts"
|
||||
|
||||
GENERAL_CHANNEL = "general"
|
||||
CONTROLLED_LOAD_CHANNEL = "controlled_load"
|
||||
FEED_IN_CHANNEL = "feed_in"
|
||||
|
@@ -10,6 +10,7 @@ from amberelectric.models.actual_interval import ActualInterval
|
||||
from amberelectric.models.channel import ChannelType
|
||||
from amberelectric.models.current_interval import CurrentInterval
|
||||
from amberelectric.models.forecast_interval import ForecastInterval
|
||||
from amberelectric.models.price_descriptor import PriceDescriptor
|
||||
from amberelectric.rest import ApiException
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -17,7 +18,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import LOGGER
|
||||
from .helpers import normalize_descriptor
|
||||
|
||||
type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
|
||||
|
||||
@@ -49,6 +49,27 @@ def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) ->
|
||||
return interval.channel_type == ChannelType.FEEDIN
|
||||
|
||||
|
||||
def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
|
||||
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
|
||||
if descriptor is None:
|
||||
return None
|
||||
if descriptor.value == "spike":
|
||||
return "spike"
|
||||
if descriptor.value == "high":
|
||||
return "high"
|
||||
if descriptor.value == "neutral":
|
||||
return "neutral"
|
||||
if descriptor.value == "low":
|
||||
return "low"
|
||||
if descriptor.value == "veryLow":
|
||||
return "very_low"
|
||||
if descriptor.value == "extremelyLow":
|
||||
return "extremely_low"
|
||||
if descriptor.value == "negative":
|
||||
return "negative"
|
||||
return None
|
||||
|
||||
|
||||
class AmberUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""
|
||||
|
||||
@@ -82,7 +103,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
|
||||
"grid": {},
|
||||
}
|
||||
try:
|
||||
data = self._api.get_current_prices(self.site_id, next=288)
|
||||
data = self._api.get_current_prices(self.site_id, next=48)
|
||||
intervals = [interval.actual_instance for interval in data]
|
||||
except ApiException as api_exception:
|
||||
raise UpdateFailed("Missing price data, skipping update") from api_exception
|
||||
|
@@ -1,25 +0,0 @@
"""Formatting helpers used to convert things."""

from amberelectric.models.price_descriptor import PriceDescriptor

DESCRIPTOR_MAP: dict[str, str] = {
PriceDescriptor.SPIKE: "spike",
PriceDescriptor.HIGH: "high",
PriceDescriptor.NEUTRAL: "neutral",
PriceDescriptor.LOW: "low",
PriceDescriptor.VERYLOW: "very_low",
PriceDescriptor.EXTREMELYLOW: "extremely_low",
PriceDescriptor.NEGATIVE: "negative",
}


def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor in DESCRIPTOR_MAP:
return DESCRIPTOR_MAP[descriptor]
return None


def format_cents_to_dollars(cents: float) -> float:
"""Return a formatted conversion from cents to dollars."""
return round(cents / 100, 2)
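As a quick illustration of the behaviour implemented by the removed helpers above (now inlined elsewhere in the integration), assuming the definitions shown in that file:

```python
# Quick illustration of the removed helpers' behaviour (uses the definitions above).
from amberelectric.models.price_descriptor import PriceDescriptor

assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low"
assert normalize_descriptor(None) is None
assert format_cents_to_dollars(2550.0) == 25.5  # 2550 cents -> 25.50 dollars
```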
@@ -22,10 +22,5 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_forecasts": {
|
||||
"service": "mdi:transmission-tower"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -23,12 +23,16 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import ATTRIBUTION
|
||||
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
|
||||
from .helpers import format_cents_to_dollars, normalize_descriptor
|
||||
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator, normalize_descriptor
|
||||
|
||||
UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}"
|
||||
|
||||
|
||||
def format_cents_to_dollars(cents: float) -> float:
|
||||
"""Return a formatted conversion from cents to dollars."""
|
||||
return round(cents / 100, 2)
|
||||
|
||||
|
||||
def friendly_channel_type(channel_type: str) -> str:
|
||||
"""Return a human readable version of the channel type."""
|
||||
if channel_type == "controlled_load":
|
||||
|
@@ -1,121 +0,0 @@
|
||||
"""Amber Electric Service class."""
|
||||
|
||||
from amberelectric.models.channel import ChannelType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.util.json import JsonValueType
|
||||
|
||||
from .const import (
|
||||
ATTR_CHANNEL_TYPE,
|
||||
ATTR_CONFIG_ENTRY_ID,
|
||||
CONTROLLED_LOAD_CHANNEL,
|
||||
DOMAIN,
|
||||
FEED_IN_CHANNEL,
|
||||
GENERAL_CHANNEL,
|
||||
SERVICE_GET_FORECASTS,
|
||||
)
|
||||
from .coordinator import AmberConfigEntry
|
||||
from .helpers import format_cents_to_dollars, normalize_descriptor
|
||||
|
||||
GET_FORECASTS_SCHEMA = vol.Schema(
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: ConfigEntrySelector({"integration": DOMAIN}),
|
||||
ATTR_CHANNEL_TYPE: vol.In(
|
||||
[GENERAL_CHANNEL, CONTROLLED_LOAD_CHANNEL, FEED_IN_CHANNEL]
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def async_get_entry(hass: HomeAssistant, config_entry_id: str) -> AmberConfigEntry:
|
||||
"""Get the Amber config entry."""
|
||||
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": config_entry_id},
|
||||
)
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": entry.title},
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
|
||||
"""Return an array of forecasts."""
|
||||
results: list[JsonValueType] = []
|
||||
|
||||
if channel_type not in data["forecasts"]:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="channel_not_found",
|
||||
translation_placeholders={"channel_type": channel_type},
|
||||
)
|
||||
|
||||
intervals = data["forecasts"][channel_type]
|
||||
|
||||
for interval in intervals:
|
||||
datum = {}
|
||||
datum["duration"] = interval.duration
|
||||
datum["date"] = interval.var_date.isoformat()
|
||||
datum["nem_date"] = interval.nem_time.isoformat()
|
||||
datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
|
||||
if interval.channel_type == ChannelType.FEEDIN:
|
||||
datum["per_kwh"] = datum["per_kwh"] * -1
|
||||
datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
|
||||
datum["start_time"] = interval.start_time.isoformat()
|
||||
datum["end_time"] = interval.end_time.isoformat()
|
||||
datum["renewables"] = round(interval.renewables)
|
||||
datum["spike_status"] = interval.spike_status.value
|
||||
datum["descriptor"] = normalize_descriptor(interval.descriptor)
|
||||
|
||||
if interval.range is not None:
|
||||
datum["range_min"] = format_cents_to_dollars(interval.range.min)
|
||||
datum["range_max"] = format_cents_to_dollars(interval.range.max)
|
||||
|
||||
if interval.advanced_price is not None:
|
||||
multiplier = -1 if interval.channel_type == ChannelType.FEEDIN else 1
|
||||
datum["advanced_price_low"] = multiplier * format_cents_to_dollars(
|
||||
interval.advanced_price.low
|
||||
)
|
||||
datum["advanced_price_predicted"] = multiplier * format_cents_to_dollars(
|
||||
interval.advanced_price.predicted
|
||||
)
|
||||
datum["advanced_price_high"] = multiplier * format_cents_to_dollars(
|
||||
interval.advanced_price.high
|
||||
)
|
||||
|
||||
results.append(datum)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Amber integration."""
|
||||
|
||||
async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
|
||||
channel_type = call.data[ATTR_CHANNEL_TYPE]
|
||||
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
|
||||
coordinator = entry.runtime_data
|
||||
forecasts = get_forecasts(channel_type, coordinator.data)
|
||||
return {"forecasts": forecasts}
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_FORECASTS,
|
||||
handle_get_forecasts,
|
||||
GET_FORECASTS_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
@@ -1,16 +0,0 @@
|
||||
get_forecasts:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: amberelectric
|
||||
channel_type:
|
||||
required: true
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- general
|
||||
- controlled_load
|
||||
- feed_in
|
||||
translation_key: channel_type
|
@@ -1,61 +1,25 @@
|
||||
{
|
||||
"config": {
|
||||
"error": {
|
||||
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||
"no_site": "No site provided",
|
||||
"unknown_error": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"site": {
|
||||
"data": {
|
||||
"site_id": "Site NMI",
|
||||
"site_name": "Site name"
|
||||
},
|
||||
"description": "Select the NMI of the site you would like to add"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]",
|
||||
"site_id": "Site ID"
|
||||
},
|
||||
"description": "Go to {api_url} to generate an API key"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_forecasts": {
|
||||
"name": "Get price forecasts",
|
||||
"description": "Retrieves price forecasts from Amber Electric for a site.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The config entry of the site to get forecasts for.",
|
||||
"name": "Config entry"
|
||||
},
|
||||
"site": {
|
||||
"data": {
|
||||
"site_id": "Site NMI",
|
||||
"site_name": "Site Name"
|
||||
},
|
||||
"channel_type": {
|
||||
"name": "Channel type",
|
||||
"description": "The channel to get forecasts for."
|
||||
}
|
||||
"description": "Select the NMI of the site you would like to add"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"integration_not_found": {
|
||||
"message": "Config entry \"{target}\" not found in registry."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"channel_not_found": {
|
||||
"message": "There is no {channel_type} channel at this site."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"channel_type": {
|
||||
"options": {
|
||||
"general": "General",
|
||||
"controlled_load": "Controlled load",
|
||||
"feed_in": "Feed-in"
|
||||
}
|
||||
"error": {
|
||||
"invalid_api_token": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||
"no_site": "No site provided",
|
||||
"unknown_error": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["amcrest"],
"quality_scale": "legacy",
"requirements": ["amcrest==1.9.9"]
"requirements": ["amcrest==1.9.8"]
}
@@ -14,7 +14,6 @@ from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .analytics import Analytics
|
||||
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
|
||||
from .http import AnalyticsDevicesView
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
@@ -56,8 +55,6 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_analytics)
|
||||
websocket_api.async_register_command(hass, websocket_analytics_preferences)
|
||||
|
||||
hass.http.register_view(AnalyticsDevicesView)
|
||||
|
||||
hass.data[DATA_COMPONENT] = analytics
|
||||
return True
|
||||
|
||||
|
@@ -27,7 +27,7 @@ from homeassistant.config_entries import SOURCE_IGNORE
|
||||
from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -77,11 +77,6 @@ from .const import (
|
||||
)
|
||||
|
||||
|
||||
def gen_uuid() -> str:
|
||||
"""Generate a new UUID."""
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnalyticsData:
|
||||
"""Analytics data."""
|
||||
@@ -189,7 +184,7 @@ class Analytics:
|
||||
return
|
||||
|
||||
if self._data.uuid is None:
|
||||
self._data.uuid = gen_uuid()
|
||||
self._data.uuid = uuid.uuid4().hex
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
|
||||
if self.supervisor:
|
||||
@@ -386,83 +381,3 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
).values():
|
||||
domains.update(platforms)
|
||||
return domains
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
"""Return the devices payload."""
|
||||
integrations_without_model_id: set[str] = set()
|
||||
devices: list[dict[str, Any]] = []
|
||||
dev_reg = dr.async_get(hass)
|
||||
# Devices that need via device info set
|
||||
new_indexes: dict[str, int] = {}
|
||||
via_devices: dict[str, str] = {}
|
||||
|
||||
seen_integrations = set()
|
||||
|
||||
for device in dev_reg.devices.values():
|
||||
# Ignore services
|
||||
if device.entry_type:
|
||||
continue
|
||||
|
||||
if not device.primary_config_entry:
|
||||
continue
|
||||
|
||||
config_entry = hass.config_entries.async_get_entry(device.primary_config_entry)
|
||||
|
||||
if config_entry is None:
|
||||
continue
|
||||
|
||||
seen_integrations.add(config_entry.domain)
|
||||
|
||||
if not device.model_id:
|
||||
integrations_without_model_id.add(config_entry.domain)
|
||||
continue
|
||||
|
||||
if not device.manufacturer:
|
||||
continue
|
||||
|
||||
new_indexes[device.id] = len(devices)
|
||||
devices.append(
|
||||
{
|
||||
"integration": config_entry.domain,
|
||||
"manufacturer": device.manufacturer,
|
||||
"model_id": device.model_id,
|
||||
"model": device.model,
|
||||
"sw_version": device.sw_version,
|
||||
"hw_version": device.hw_version,
|
||||
"has_suggested_area": device.suggested_area is not None,
|
||||
"has_configuration_url": device.configuration_url is not None,
|
||||
"via_device": None,
|
||||
}
|
||||
)
|
||||
if device.via_device_id:
|
||||
via_devices[device.id] = device.via_device_id
|
||||
|
||||
for from_device, via_device in via_devices.items():
|
||||
if via_device not in new_indexes:
|
||||
continue
|
||||
devices[new_indexes[from_device]]["via_device"] = new_indexes[via_device]
|
||||
|
||||
integrations = {
|
||||
domain: integration
|
||||
for domain, integration in (
|
||||
await async_get_integrations(hass, seen_integrations)
|
||||
).items()
|
||||
if isinstance(integration, Integration)
|
||||
}
|
||||
|
||||
for device_info in devices:
|
||||
if integration := integrations.get(device_info["integration"]):
|
||||
device_info["is_custom_integration"] = not integration.is_built_in
|
||||
|
||||
return {
|
||||
"version": "home-assistant:1",
|
||||
"no_model_id": sorted(
|
||||
[
|
||||
domain
|
||||
for domain in integrations_without_model_id
|
||||
if domain in integrations and integrations[domain].is_built_in
|
||||
]
|
||||
),
|
||||
"devices": devices,
|
||||
}
|
||||
|
@@ -1,27 +0,0 @@
|
||||
"""HTTP endpoints for analytics integration."""
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .analytics import async_devices_payload
|
||||
|
||||
|
||||
class AnalyticsDevicesView(HomeAssistantView):
|
||||
"""View to handle analytics devices payload download requests."""
|
||||
|
||||
url = "/api/analytics/devices"
|
||||
name = "api:analytics:devices"
|
||||
|
||||
@require_admin
|
||||
async def get(self, request: web.Request) -> web.Response:
|
||||
"""Return analytics devices payload as JSON."""
|
||||
hass: HomeAssistant = request.app[KEY_HASS]
|
||||
payload = await async_devices_payload(hass)
|
||||
return self.json(
|
||||
payload,
|
||||
headers={
|
||||
"Content-Disposition": "attachment; filename=analytics_devices.json"
|
||||
},
|
||||
)
|
@@ -3,7 +3,7 @@
|
||||
"name": "Analytics",
|
||||
"after_dependencies": ["energy", "hassio", "recorder"],
|
||||
"codeowners": ["@home-assistant/core", "@ludeeus"],
|
||||
"dependencies": ["api", "websocket_api", "http"],
|
||||
"dependencies": ["api", "websocket_api"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/analytics",
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
|
@@ -55,6 +55,7 @@ async def async_setup_entry(
|
||||
entry.runtime_data = AnalyticsInsightsData(coordinator=coordinator, names=names)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -64,3 +65,10 @@ async def async_unload_entry(
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(
|
||||
hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry
|
||||
) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
@@ -11,11 +11,7 @@ from python_homeassistant_analytics import (
from python_homeassistant_analytics.models import Environment, IntegrationType
import voluptuous as vol

from homeassistant.config_entries import (
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlowWithReload,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
@@ -133,7 +129,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
        )


class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithReload):
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
    """Handle Homeassistant Analytics options."""

    async def async_step_init(
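The hunks above, like the Android TV Remote changes further down, swap `OptionsFlowWithReload` for a plain `OptionsFlow` plus an explicit update listener. A minimal sketch of the two patterns, with placeholder class names that are not from this diff:

```python
# Minimal sketch of the two options-flow patterns shown in this diff.
# MyOptionsFlowHandler / MyReloadingOptionsFlowHandler are illustrative names.
from homeassistant.config_entries import (
    ConfigEntry,
    OptionsFlow,
    OptionsFlowWithReload,
)
from homeassistant.core import HomeAssistant


async def _update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Reload the entry whenever its options change (older pattern)."""
    await hass.config_entries.async_reload(entry.entry_id)


# Older pattern: register the listener in async_setup_entry, e.g.
#     entry.async_on_unload(entry.add_update_listener(_update_listener))
class MyOptionsFlowHandler(OptionsFlow):
    """Options flow that relies on the listener above."""


# Newer pattern: the config entry is reloaded automatically after the
# options flow finishes, so no listener is needed.
class MyReloadingOptionsFlowHandler(OptionsFlowWithReload):
    """Options flow that triggers an automatic reload."""
```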
@@ -68,6 +68,7 @@ async def async_setup_entry(
    entry.async_on_unload(
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
    )
    entry.async_on_unload(entry.add_update_listener(async_update_options))
    entry.async_on_unload(api.disconnect)

    return True
@@ -79,3 +80,13 @@ async def async_unload_entry(
    """Unload a config entry."""
    _LOGGER.debug("async_unload_entry: %s", entry.data)
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_update_options(
    hass: HomeAssistant, entry: AndroidTVRemoteConfigEntry
) -> None:
    """Handle options update."""
    _LOGGER.debug(
        "async_update_options: data: %s options: %s", entry.data, entry.options
    )
    await hass.config_entries.async_reload(entry.entry_id)
@@ -19,7 +19,7 @@ from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlowWithReload,
    OptionsFlow,
)
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
from homeassistant.core import callback
@@ -116,10 +116,10 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
            pin = user_input["pin"]
            await self.api.async_finish_pairing(pin)
            if self.source == SOURCE_REAUTH:
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
                await self.hass.config_entries.async_reload(
                    self._get_reauth_entry().entry_id
                )

                return self.async_abort(reason="reauth_successful")
            return self.async_create_entry(
                title=self.name,
                data={
@@ -243,7 +243,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
        return AndroidTVRemoteOptionsFlowHandler(config_entry)


class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
class AndroidTVRemoteOptionsFlowHandler(OptionsFlow):
    """Android TV Remote options flow."""

    def __init__(self, config_entry: AndroidTVRemoteConfigEntry) -> None:
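The reauth hunk above also trades the `async_update_reload_and_abort` helper for a manual reload followed by an abort. A hedged sketch of the two equivalent finish styles, using an illustrative flow class that is not part of this diff:

```python
# Hedged sketch of the two reauth-finish styles in the hunk above.
# ExampleFlow and the "example" domain are placeholders.
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult


class ExampleFlow(ConfigFlow, domain="example"):
    """Illustrative config flow."""

    async def _finish_reauth(self) -> ConfigFlowResult:
        # Newer helper: update the entry, reload it and abort in one call.
        return self.async_update_reload_and_abort(
            self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
        )

    async def _finish_reauth_manually(self) -> ConfigFlowResult:
        # Older equivalent: reload the entry explicitly, then abort the flow.
        await self.hass.config_entries.async_reload(
            self._get_reauth_entry().entry_id
        )
        return self.async_abort(reason="reauth_successful")
```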
@@ -27,4 +27,4 @@ def create_api(hass: HomeAssistant, host: str, enable_ime: bool) -> AndroidTVRem

def get_enable_ime(entry: AndroidTVRemoteConfigEntry) -> bool:
    """Get value of enable_ime option or its default value."""
    return entry.options.get(CONF_ENABLE_IME, CONF_ENABLE_IME_DEFAULT_VALUE)  # type: ignore[no-any-return]
    return entry.options.get(CONF_ENABLE_IME, CONF_ENABLE_IME_DEFAULT_VALUE)
@@ -10,7 +10,7 @@
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "cannot_receive_deviceinfo": "Failed to retrieve MAC address. Make sure the device is turned on"
      "cannot_receive_deviceinfo": "Failed to retrieve MAC Address. Make sure the device is turned on"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
@@ -10,9 +10,9 @@ DEFAULT_CONVERSATION_NAME = "Claude conversation"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307"
CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 3000
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
@@ -6,6 +6,7 @@ from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AnthropicConfigEntry
@@ -71,4 +72,13 @@ class AnthropicConversationEntity(

        await self._async_handle_chat_log(chat_log)

        return conversation.async_get_result_from_chat_log(user_input, chat_log)
        response_content = chat_log.content[-1]
        if not isinstance(response_content, conversation.AssistantContent):
            raise TypeError("Last message must be an assistant message")
        intent_response = intent.IntentResponse(language=user_input.language)
        intent_response.async_set_speech(response_content.content or "")
        return conversation.ConversationResult(
            response=intent_response,
            conversation_id=chat_log.conversation_id,
            continue_conversation=chat_log.continue_conversation,
        )
@@ -311,13 +311,11 @@ def _create_token_stats(
class AnthropicBaseLLMEntity(Entity):
    """Anthropic base LLM entity."""

    _attr_has_entity_name = True
    _attr_name = None

    def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
        """Initialize the entity."""
        self.entry = entry
        self.subentry = subentry
        self._attr_name = subentry.title
        self._attr_unique_id = subentry.subentry_id
        self._attr_device_info = dr.DeviceInfo(
            identifiers={(DOMAIN, subentry.subentry_id)},
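The hunk above switches between the class-level `_attr_name = None` convention and assigning the subentry title per instance. A small hedged sketch of what that convention means (ExampleEntity is an illustrative name):

```python
# Hedged sketch of the entity-naming convention touched in the hunk above.
from homeassistant.helpers.entity import Entity


class ExampleEntity(Entity):
    # With has_entity_name set, a name of None means "use the device name",
    # while a string (such as a subentry title) is shown alongside it.
    _attr_has_entity_name = True
    _attr_name = None
```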
@@ -29,7 +29,7 @@
        "set_options": {
          "data": {
            "name": "[%key:common::config_flow::data::name%]",
            "prompt": "[%key:common::config_flow::data::prompt%]",
            "prompt": "Instructions",
            "chat_model": "[%key:common::generic::model%]",
            "max_tokens": "Maximum tokens to return in response",
            "temperature": "Temperature",
@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/apple_tv",
  "iot_class": "local_push",
  "loggers": ["pyatv", "srptools"],
  "requirements": ["pyatv==0.16.1"],
  "requirements": ["pyatv==0.16.0"],
  "zeroconf": [
    "_mediaremotetv._tcp.local.",
    "_companion-link._tcp.local.",
@@ -6,7 +6,7 @@
  "documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
  "iot_class": "local_polling",
  "loggers": ["arcam"],
  "requirements": ["arcam-fmj==1.8.2"],
  "requirements": ["arcam-fmj==1.8.1"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -38,6 +38,8 @@ from .pipeline import (
    async_create_default_pipeline,
    async_get_pipeline,
    async_get_pipelines,
    async_migrate_engine,
    async_run_migrations,
    async_setup_pipeline_store,
    async_update_pipeline,
)
@@ -59,6 +61,7 @@ __all__ = (
    "WakeWordSettings",
    "async_create_default_pipeline",
    "async_get_pipelines",
    "async_migrate_engine",
    "async_pipeline_from_audio_stream",
    "async_setup",
    "async_update_pipeline",
@@ -84,6 +87,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    hass.data[DATA_LAST_WAKE_UP] = {}

    await async_setup_pipeline_store(hass)
    await async_run_migrations(hass)
    async_register_websocket_api(hass)

    return True
@@ -3,6 +3,7 @@
DOMAIN = "assist_pipeline"

DATA_CONFIG = f"{DOMAIN}.config"
DATA_MIGRATIONS = f"{DOMAIN}_migrations"

DEFAULT_PIPELINE_TIMEOUT = 60 * 5  # seconds
@@ -13,7 +13,7 @@ from pathlib import Path
from queue import Empty, Queue
from threading import Thread
import time
from typing import TYPE_CHECKING, Any, cast
from typing import TYPE_CHECKING, Any, Literal, cast
import wave

import hass_nabucasa
@@ -49,6 +49,7 @@ from .const import (
    CONF_DEBUG_RECORDING_DIR,
    DATA_CONFIG,
    DATA_LAST_WAKE_UP,
    DATA_MIGRATIONS,
    DOMAIN,
    MS_PER_CHUNK,
    SAMPLE_CHANNELS,
@@ -2058,6 +2059,50 @@ async def async_setup_pipeline_store(hass: HomeAssistant) -> PipelineData:
    return PipelineData(pipeline_store)


@callback
def async_migrate_engine(
    hass: HomeAssistant,
    engine_type: Literal["conversation", "stt", "tts", "wake_word"],
    old_value: str,
    new_value: str,
) -> None:
    """Register a migration of an engine used in pipelines."""
    hass.data.setdefault(DATA_MIGRATIONS, {})[engine_type] = (old_value, new_value)

    # Run migrations when config is already loaded
    if DATA_CONFIG in hass.data:
        hass.async_create_background_task(
            async_run_migrations(hass), "assist_pipeline_migration", eager_start=True
        )


async def async_run_migrations(hass: HomeAssistant) -> None:
    """Run pipeline migrations."""
    if not (migrations := hass.data.get(DATA_MIGRATIONS)):
        return

    engine_attr = {
        "conversation": "conversation_engine",
        "stt": "stt_engine",
        "tts": "tts_engine",
        "wake_word": "wake_word_entity",
    }

    updates = []

    for pipeline in async_get_pipelines(hass):
        attr_updates = {}
        for engine_type, (old_value, new_value) in migrations.items():
            if getattr(pipeline, engine_attr[engine_type]) == old_value:
                attr_updates[engine_attr[engine_type]] = new_value

        if attr_updates:
            updates.append((pipeline, attr_updates))

    for pipeline, attr_updates in updates:
        await async_update_pipeline(hass, pipeline, **attr_updates)


@dataclass
class PipelineConversationData:
    """Hold data for the duration of a conversation."""
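The new `async_migrate_engine` helper above lets an integration point existing pipelines at a renamed engine during setup. A hedged usage sketch follows; the engine IDs are made up for illustration and are not taken from this diff.

```python
# Hedged usage sketch for async_migrate_engine; the engine IDs below are
# illustrative, not from this diff.
from homeassistant.components.assist_pipeline import async_migrate_engine
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # Any pipeline still pointing at the old STT engine ID is rewritten to the
    # new one, either immediately or once the pipeline store has loaded.
    async_migrate_engine(hass, "stt", "my_old_stt_engine", "stt.my_new_engine")
    return True
```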
@@ -68,10 +68,9 @@ ask_question:
    required: true
    selector:
      entity:
        filter:
          domain: assist_satellite
          supported_features:
            - assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION
        domain: assist_satellite
        supported_features:
          - assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION
  question:
    required: false
    example: "What kind of music would you like to play?"
@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==8.10.0", "yalexs-ble==3.1.0"]
  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.6.0"]
}
@@ -6,7 +6,6 @@ from datetime import timedelta
|
||||
import logging
|
||||
|
||||
API_CO2 = "carbon_dioxide"
|
||||
API_DEW_POINT = "dew_point"
|
||||
API_DUST = "dust"
|
||||
API_HUMID = "humidity"
|
||||
API_LUX = "illuminance"
|
||||
|
@@ -34,7 +34,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import (
|
||||
API_CO2,
|
||||
API_DEW_POINT,
|
||||
API_DUST,
|
||||
API_HUMID,
|
||||
API_LUX,
|
||||
@@ -111,15 +110,6 @@ SENSOR_TYPES: tuple[AwairSensorEntityDescription, ...] = (
|
||||
unique_id_tag="CO2", # matches legacy format
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AwairSensorEntityDescription(
|
||||
key=API_DEW_POINT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
translation_key="dew_point",
|
||||
unique_id_tag="dew_point",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
SENSOR_TYPES_DUST: tuple[AwairSensorEntityDescription, ...] = (
|
||||
|
@@ -57,9 +57,6 @@
|
||||
},
|
||||
"sound_level": {
|
||||
"name": "Sound level"
|
||||
},
|
||||
"dew_point": {
|
||||
"name": "Dew point"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -30,9 +30,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: AxisConfigEntry)
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
hub.setup()
|
||||
|
||||
config_entry.async_on_unload(
|
||||
config_entry.add_update_listener(hub.async_new_address_callback)
|
||||
)
|
||||
config_entry.add_update_listener(hub.async_new_address_callback)
|
||||
config_entry.async_on_unload(hub.teardown)
|
||||
config_entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, hub.shutdown)
|
||||
|
@@ -1 +0,0 @@
|
||||
"""Bauknecht virtual integration."""
|
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"domain": "bauknecht",
|
||||
"name": "Bauknecht",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "whirlpool"
|
||||
}
|
@@ -15,31 +15,23 @@ from bluecurrent_api.exceptions import (
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_TOKEN, Platform
|
||||
from homeassistant.const import ATTR_NAME, CONF_API_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from .const import (
|
||||
CHARGEPOINT_SETTINGS,
|
||||
CHARGEPOINT_STATUS,
|
||||
DOMAIN,
|
||||
EVSE_ID,
|
||||
LOGGER,
|
||||
PLUG_AND_CHARGE,
|
||||
VALUE,
|
||||
)
|
||||
from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE
|
||||
|
||||
type BlueCurrentConfigEntry = ConfigEntry[Connector]
|
||||
|
||||
PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH]
|
||||
PLATFORMS = [Platform.BUTTON, Platform.SENSOR]
|
||||
CHARGE_POINTS = "CHARGE_POINTS"
|
||||
DATA = "data"
|
||||
DELAY = 5
|
||||
|
||||
GRID = "GRID"
|
||||
OBJECT = "object"
|
||||
VALUE_TYPES = [CHARGEPOINT_STATUS, CHARGEPOINT_SETTINGS]
|
||||
VALUE_TYPES = ["CH_STATUS"]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -102,7 +94,7 @@ class Connector:
|
||||
elif object_name in VALUE_TYPES:
|
||||
value_data: dict = message[DATA]
|
||||
evse_id = value_data.pop(EVSE_ID)
|
||||
self.update_charge_point(evse_id, object_name, value_data)
|
||||
self.update_charge_point(evse_id, value_data)
|
||||
|
||||
# gets grid key / values
|
||||
elif GRID in object_name:
|
||||
@@ -114,37 +106,26 @@ class Connector:
|
||||
"""Handle incoming chargepoint data."""
|
||||
await asyncio.gather(
|
||||
*(
|
||||
self.handle_charge_point(entry[EVSE_ID], entry)
|
||||
self.handle_charge_point(
|
||||
entry[EVSE_ID], entry[MODEL_TYPE], entry[ATTR_NAME]
|
||||
)
|
||||
for entry in charge_points_data
|
||||
),
|
||||
self.client.get_grid_status(charge_points_data[0][EVSE_ID]),
|
||||
)
|
||||
|
||||
async def handle_charge_point(
|
||||
self, evse_id: str, charge_point: dict[str, Any]
|
||||
) -> None:
|
||||
async def handle_charge_point(self, evse_id: str, model: str, name: str) -> None:
|
||||
"""Add the chargepoint and request their data."""
|
||||
self.add_charge_point(evse_id, charge_point)
|
||||
self.add_charge_point(evse_id, model, name)
|
||||
await self.client.get_status(evse_id)
|
||||
|
||||
def add_charge_point(self, evse_id: str, charge_point: dict[str, Any]) -> None:
|
||||
def add_charge_point(self, evse_id: str, model: str, name: str) -> None:
|
||||
"""Add a charge point to charge_points."""
|
||||
self.charge_points[evse_id] = charge_point
|
||||
self.charge_points[evse_id] = {MODEL_TYPE: model, ATTR_NAME: name}
|
||||
|
||||
def update_charge_point(self, evse_id: str, update_type: str, data: dict) -> None:
|
||||
def update_charge_point(self, evse_id: str, data: dict) -> None:
|
||||
"""Update the charge point data."""
|
||||
charge_point = self.charge_points[evse_id]
|
||||
if update_type == CHARGEPOINT_SETTINGS:
|
||||
# Update the plug and charge object. The library parses this object to a bool instead of an object.
|
||||
plug_and_charge = charge_point.get(PLUG_AND_CHARGE)
|
||||
if plug_and_charge is not None:
|
||||
plug_and_charge[VALUE] = data[PLUG_AND_CHARGE]
|
||||
|
||||
# Remove the plug and charge object from the data list before updating.
|
||||
del data[PLUG_AND_CHARGE]
|
||||
|
||||
charge_point.update(data)
|
||||
|
||||
self.charge_points[evse_id].update(data)
|
||||
self.dispatch_charge_point_update_signal(evse_id)
|
||||
|
||||
def dispatch_charge_point_update_signal(self, evse_id: str) -> None:
|
||||
|
@@ -8,14 +8,3 @@ LOGGER = logging.getLogger(__package__)
|
||||
|
||||
EVSE_ID = "evse_id"
|
||||
MODEL_TYPE = "model_type"
|
||||
PLUG_AND_CHARGE = "plug_and_charge"
|
||||
VALUE = "value"
|
||||
PERMISSION = "permission"
|
||||
CHARGEPOINT_STATUS = "CH_STATUS"
|
||||
CHARGEPOINT_SETTINGS = "CH_SETTINGS"
|
||||
BLOCK = "block"
|
||||
UNAVAILABLE = "unavailable"
|
||||
AVAILABLE = "available"
|
||||
LINKED_CHARGE_CARDS = "linked_charge_cards_only"
|
||||
PUBLIC_CHARGING = "public_charging"
|
||||
ACTIVITY = "activity"
|
||||
|
@@ -30,17 +30,6 @@
|
||||
"stop_charge_session": {
|
||||
"default": "mdi:stop"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"plug_and_charge": {
|
||||
"default": "mdi:ev-plug-type2"
|
||||
},
|
||||
"linked_charge_cards": {
|
||||
"default": "mdi:account-group"
|
||||
},
|
||||
"block": {
|
||||
"default": "mdi:lock"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/blue_current",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["bluecurrent_api"],
|
||||
"requirements": ["bluecurrent-api==1.2.4"]
|
||||
"requirements": ["bluecurrent-api==1.2.3"]
|
||||
}
|
||||
|
@@ -124,17 +124,6 @@
|
||||
"reset": {
|
||||
"name": "Reset"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"plug_and_charge": {
|
||||
"name": "Plug & Charge"
|
||||
},
|
||||
"linked_charge_cards_only": {
|
||||
"name": "Linked charging cards only"
|
||||
},
|
||||
"block": {
|
||||
"name": "Block charge point"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,169 +0,0 @@
|
||||
"""Support for Blue Current switches."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import PLUG_AND_CHARGE, BlueCurrentConfigEntry, Connector
|
||||
from .const import (
|
||||
AVAILABLE,
|
||||
BLOCK,
|
||||
LINKED_CHARGE_CARDS,
|
||||
PUBLIC_CHARGING,
|
||||
UNAVAILABLE,
|
||||
VALUE,
|
||||
)
|
||||
from .entity import ChargepointEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class BlueCurrentSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Describes a Blue Current switch entity."""
|
||||
|
||||
function: Callable[[Connector, str, bool], Any]
|
||||
|
||||
turn_on_off_fn: Callable[[str, Connector], tuple[bool, bool]]
|
||||
"""Update the switch based on the latest data received from the websocket. The first returned boolean is _attr_is_on, the second one has_value."""
|
||||
|
||||
|
||||
def update_on_value_and_activity(
|
||||
key: str, evse_id: str, connector: Connector, reverse_is_on: bool = False
|
||||
) -> tuple[bool, bool]:
|
||||
"""Return the updated state of the switch based on received chargepoint data and activity."""
|
||||
|
||||
data_object = connector.charge_points[evse_id].get(key)
|
||||
is_on = data_object[VALUE] if data_object is not None else None
|
||||
activity = connector.charge_points[evse_id].get("activity")
|
||||
|
||||
if is_on is not None and activity == AVAILABLE:
|
||||
return is_on if not reverse_is_on else not is_on, True
|
||||
return False, False
|
||||
|
||||
|
||||
def update_block_switch(evse_id: str, connector: Connector) -> tuple[bool, bool]:
|
||||
"""Return the updated data for a block switch."""
|
||||
activity = connector.charge_points[evse_id].get("activity")
|
||||
return activity == UNAVAILABLE, activity in [AVAILABLE, UNAVAILABLE]
|
||||
|
||||
|
||||
def update_charge_point(
|
||||
key: str, evse_id: str, connector: Connector, new_switch_value: bool
|
||||
) -> None:
|
||||
"""Change charge point data when the state of the switch changes."""
|
||||
data_objects = connector.charge_points[evse_id].get(key)
|
||||
if data_objects is not None:
|
||||
data_objects[VALUE] = new_switch_value
|
||||
|
||||
|
||||
async def set_plug_and_charge(connector: Connector, evse_id: str, value: bool) -> None:
|
||||
"""Toggle the plug and charge setting for a specific charging point."""
|
||||
await connector.client.set_plug_and_charge(evse_id, value)
|
||||
update_charge_point(PLUG_AND_CHARGE, evse_id, connector, value)
|
||||
|
||||
|
||||
async def set_linked_charge_cards(
|
||||
connector: Connector, evse_id: str, value: bool
|
||||
) -> None:
|
||||
"""Toggle the plug and charge setting for a specific charging point."""
|
||||
await connector.client.set_linked_charge_cards_only(evse_id, value)
|
||||
update_charge_point(PUBLIC_CHARGING, evse_id, connector, not value)
|
||||
|
||||
|
||||
SWITCHES = (
|
||||
BlueCurrentSwitchEntityDescription(
|
||||
key=PLUG_AND_CHARGE,
|
||||
translation_key=PLUG_AND_CHARGE,
|
||||
function=set_plug_and_charge,
|
||||
turn_on_off_fn=lambda evse_id, connector: (
|
||||
update_on_value_and_activity(PLUG_AND_CHARGE, evse_id, connector)
|
||||
),
|
||||
),
|
||||
BlueCurrentSwitchEntityDescription(
|
||||
key=LINKED_CHARGE_CARDS,
|
||||
translation_key=LINKED_CHARGE_CARDS,
|
||||
function=set_linked_charge_cards,
|
||||
turn_on_off_fn=lambda evse_id, connector: (
|
||||
update_on_value_and_activity(
|
||||
PUBLIC_CHARGING, evse_id, connector, reverse_is_on=True
|
||||
)
|
||||
),
|
||||
),
|
||||
BlueCurrentSwitchEntityDescription(
|
||||
key=BLOCK,
|
||||
translation_key=BLOCK,
|
||||
function=lambda connector, evse_id, value: connector.client.block(
|
||||
evse_id, value
|
||||
),
|
||||
turn_on_off_fn=update_block_switch,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: BlueCurrentConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Blue Current switches."""
|
||||
connector = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
ChargePointSwitch(
|
||||
connector,
|
||||
evse_id,
|
||||
switch,
|
||||
)
|
||||
for evse_id in connector.charge_points
|
||||
for switch in SWITCHES
|
||||
)
|
||||
|
||||
|
||||
class ChargePointSwitch(ChargepointEntity, SwitchEntity):
|
||||
"""Base charge point switch."""
|
||||
|
||||
has_value = True
|
||||
entity_description: BlueCurrentSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
connector: Connector,
|
||||
evse_id: str,
|
||||
switch: BlueCurrentSwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(connector, evse_id)
|
||||
|
||||
self.key = switch.key
|
||||
self.entity_description = switch
|
||||
self.evse_id = evse_id
|
||||
self._attr_available = True
|
||||
self._attr_unique_id = f"{switch.key}_{evse_id}"
|
||||
|
||||
async def call_function(self, value: bool) -> None:
|
||||
"""Call the function to set setting."""
|
||||
await self.entity_description.function(self.connector, self.evse_id, value)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.call_function(True)
|
||||
self._attr_is_on = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.call_function(False)
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def update_from_latest_data(self) -> None:
|
||||
"""Fetch new state data for the switch."""
|
||||
new_state = self.entity_description.turn_on_off_fn(self.evse_id, self.connector)
|
||||
self._attr_is_on = new_state[0]
|
||||
self.has_value = new_state[1]
|
@@ -15,12 +15,12 @@
|
||||
],
|
||||
"quality_scale": "internal",
|
||||
"requirements": [
|
||||
"bleak==1.0.1",
|
||||
"bleak-retry-connector==4.0.0",
|
||||
"bluetooth-adapters==2.0.0",
|
||||
"bleak==0.22.3",
|
||||
"bleak-retry-connector==3.9.0",
|
||||
"bluetooth-adapters==0.21.4",
|
||||
"bluetooth-auto-recovery==1.5.2",
|
||||
"bluetooth-data-tools==1.28.2",
|
||||
"dbus-fast==2.44.2",
|
||||
"habluetooth==4.0.1"
|
||||
"dbus-fast==2.43.0",
|
||||
"habluetooth==3.49.0"
|
||||
]
|
||||
}
|
||||
|
@@ -8,33 +8,20 @@ from bring_api import Bring
|
||||
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
BringActivityCoordinator,
|
||||
BringConfigEntry,
|
||||
BringCoordinators,
|
||||
BringDataUpdateCoordinator,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.EVENT, Platform.SENSOR, Platform.TODO]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Bring! services."""
|
||||
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BringConfigEntry) -> bool:
|
||||
"""Set up Bring! from a config entry."""
|
||||
|
||||
|
@@ -7,8 +7,5 @@ DOMAIN = "bring"
|
||||
ATTR_SENDER: Final = "sender"
|
||||
ATTR_ITEM_NAME: Final = "item"
|
||||
ATTR_NOTIFICATION_TYPE: Final = "message"
|
||||
ATTR_REACTION: Final = "reaction"
|
||||
ATTR_ACTIVITY: Final = "uuid"
|
||||
ATTR_RECEIVER: Final = "publicUserUuid"
|
||||
|
||||
SERVICE_PUSH_NOTIFICATION = "send_message"
|
||||
SERVICE_ACTIVITY_STREAM_REACTION = "send_reaction"
|
||||
|
@@ -35,9 +35,6 @@
|
||||
"services": {
|
||||
"send_message": {
|
||||
"service": "mdi:cellphone-message"
|
||||
},
|
||||
"send_reaction": {
|
||||
"service": "mdi:thumb-up"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,110 +0,0 @@
|
||||
"""Actions for Bring! integration."""
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bring_api import (
|
||||
ActivityType,
|
||||
BringAuthException,
|
||||
BringNotificationType,
|
||||
BringRequestException,
|
||||
ReactionType,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.event import ATTR_EVENT_TYPE
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
|
||||
from .const import (
|
||||
ATTR_ACTIVITY,
|
||||
ATTR_REACTION,
|
||||
ATTR_RECEIVER,
|
||||
DOMAIN,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION,
|
||||
)
|
||||
from .coordinator import BringConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_REACTION): vol.All(
|
||||
vol.Upper,
|
||||
vol.Coerce(ReactionType),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_config_entry(hass: HomeAssistant, entry_id: str) -> BringConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
if TYPE_CHECKING:
|
||||
assert entry
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_loaded",
|
||||
)
|
||||
return entry
|
||||
|
||||
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Bring! integration."""
|
||||
|
||||
async def async_send_activity_stream_reaction(call: ServiceCall) -> None:
|
||||
"""Send a reaction in response to recent activity of a list member."""
|
||||
|
||||
if (
|
||||
not (state := hass.states.get(call.data[ATTR_ENTITY_ID]))
|
||||
or not (entity := er.async_get(hass).async_get(call.data[ATTR_ENTITY_ID]))
|
||||
or not entity.config_entry_id
|
||||
):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entity_not_found",
|
||||
translation_placeholders={
|
||||
ATTR_ENTITY_ID: call.data[ATTR_ENTITY_ID],
|
||||
},
|
||||
)
|
||||
config_entry = get_config_entry(hass, entity.config_entry_id)
|
||||
|
||||
coordinator = config_entry.runtime_data.data
|
||||
|
||||
list_uuid = entity.unique_id.split("_")[1]
|
||||
|
||||
activity = state.attributes[ATTR_EVENT_TYPE]
|
||||
|
||||
reaction: ReactionType = call.data[ATTR_REACTION]
|
||||
|
||||
if not activity:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="activity_not_found",
|
||||
)
|
||||
try:
|
||||
await coordinator.bring.notify(
|
||||
list_uuid,
|
||||
BringNotificationType.LIST_ACTIVITY_STREAM_REACTION,
|
||||
receiver=state.attributes[ATTR_RECEIVER],
|
||||
activity=state.attributes[ATTR_ACTIVITY],
|
||||
activity_type=ActivityType(activity.upper()),
|
||||
reaction=reaction,
|
||||
)
|
||||
except (BringRequestException, BringAuthException) as e:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="reaction_request_failed",
|
||||
) from e
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION,
|
||||
async_send_activity_stream_reaction,
|
||||
SERVICE_ACTIVITY_STREAM_REACTION_SCHEMA,
|
||||
)
|
@@ -21,28 +21,3 @@ send_message:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
send_reaction:
|
||||
fields:
|
||||
entity_id:
|
||||
required: true
|
||||
selector:
|
||||
entity:
|
||||
filter:
|
||||
- integration: bring
|
||||
domain: event
|
||||
example: event.shopping_list
|
||||
reaction:
|
||||
required: true
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- label: 👍🏼
|
||||
value: thumbs_up
|
||||
- label: 🧐
|
||||
value: monocle
|
||||
- label: 🤤
|
||||
value: drooling
|
||||
- label: ❤️
|
||||
value: heart
|
||||
mode: dropdown
|
||||
example: thumbs_up
|
||||
|
@@ -144,19 +144,6 @@
|
||||
},
|
||||
"notify_request_failed": {
|
||||
"message": "Failed to send push notification for Bring! due to a connection error, try again later"
|
||||
},
|
||||
"reaction_request_failed": {
|
||||
"message": "Failed to send reaction for Bring! due to a connection error, try again later"
|
||||
},
|
||||
"activity_not_found": {
|
||||
"message": "Failed to send reaction for Bring! — No recent activity found"
|
||||
},
|
||||
"entity_not_found": {
|
||||
"message": "Failed to send reaction for Bring! — Unknown entity {entity_id}"
|
||||
},
|
||||
|
||||
"entry_not_loaded": {
|
||||
"message": "The account associated with this Bring! list is either not loaded or disabled in Home Assistant."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -177,20 +164,6 @@
|
||||
"description": "Item name(s) to include in an urgent message e.g. 'Attention! Attention! - We still urgently need: [Items]'"
|
||||
}
|
||||
}
|
||||
},
|
||||
"send_reaction": {
|
||||
"name": "Send reaction",
|
||||
"description": "Sends a reaction to a recent activity on a Bring! list by a member of the shared list.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "Activities",
|
||||
"description": "Select the Bring! activities event entity for reacting to its most recent event"
|
||||
},
|
||||
"reaction": {
|
||||
"name": "Reaction",
|
||||
"description": "Type of reaction to send in response."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
@@ -11,7 +11,6 @@ DOMAINS_AND_TYPES = {
|
||||
Platform.SELECT: {"HYS"},
|
||||
Platform.SENSOR: {
|
||||
"A1",
|
||||
"A2",
|
||||
"MP1S",
|
||||
"RM4MINI",
|
||||
"RM4PRO",
|
||||
|
@@ -10,7 +10,6 @@ from homeassistant.components.sensor import (
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
PERCENTAGE,
|
||||
UnitOfElectricCurrent,
|
||||
UnitOfElectricPotential,
|
||||
@@ -35,24 +34,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
key="air_quality",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="pm10",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="pm2_5",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="pm1",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
|
@@ -25,7 +25,6 @@ def get_update_manager(device: BroadlinkDevice[_ApiT]) -> BroadlinkUpdateManager
|
||||
"""Return an update manager for a given Broadlink device."""
|
||||
update_managers: dict[str, type[BroadlinkUpdateManager]] = {
|
||||
"A1": BroadlinkA1UpdateManager,
|
||||
"A2": BroadlinkA2UpdateManager,
|
||||
"BG1": BroadlinkBG1UpdateManager,
|
||||
"HYS": BroadlinkThermostatUpdateManager,
|
||||
"LB1": BroadlinkLB1UpdateManager,
|
||||
@@ -119,16 +118,6 @@ class BroadlinkA1UpdateManager(BroadlinkUpdateManager[blk.a1]):
|
||||
return await self.device.async_request(self.device.api.check_sensors_raw)
|
||||
|
||||
|
||||
class BroadlinkA2UpdateManager(BroadlinkUpdateManager[blk.a2]):
|
||||
"""Manages updates for Broadlink A2 devices."""
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
|
||||
async def async_fetch_data(self) -> dict[str, Any]:
|
||||
"""Fetch data from the device."""
|
||||
return await self.device.async_request(self.device.api.check_sensors_raw)
|
||||
|
||||
|
||||
class BroadlinkMP1UpdateManager(BroadlinkUpdateManager[blk.mp1]):
|
||||
"""Manages updates for Broadlink MP1 devices."""
|
||||
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
|
||||
"requirements": ["brother==5.0.0"],
|
||||
"requirements": ["brother==4.3.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_printer._tcp.local.",
|
||||
|
@@ -12,7 +12,6 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNA
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import CONF_PASSKEY, DEFAULT_PORT, DOMAIN
|
||||
|
||||
@@ -22,15 +21,12 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize BSBLan flow."""
|
||||
self.host: str | None = None
|
||||
self.port: int = DEFAULT_PORT
|
||||
self.mac: str | None = None
|
||||
self.passkey: str | None = None
|
||||
self.username: str | None = None
|
||||
self.password: str | None = None
|
||||
self._auth_required = True
|
||||
host: str
|
||||
port: int
|
||||
mac: str
|
||||
passkey: str | None = None
|
||||
username: str | None = None
|
||||
password: str | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -45,111 +41,9 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self.username = user_input.get(CONF_USERNAME)
|
||||
self.password = user_input.get(CONF_PASSWORD)
|
||||
|
||||
return await self._validate_and_create()
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle Zeroconf discovery."""
|
||||
|
||||
self.host = str(discovery_info.ip_address)
|
||||
self.port = discovery_info.port or DEFAULT_PORT
|
||||
|
||||
# Get MAC from properties
|
||||
self.mac = discovery_info.properties.get("mac")
|
||||
|
||||
# If MAC was found in zeroconf, use it immediately
|
||||
if self.mac:
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: self.host,
|
||||
CONF_PORT: self.port,
|
||||
}
|
||||
)
|
||||
else:
|
||||
# MAC not available from zeroconf - check for existing host/port first
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: self.host, CONF_PORT: self.port}
|
||||
)
|
||||
|
||||
# Try to get device info without authentication to minimize discovery popup
|
||||
config = BSBLANConfig(host=self.host, port=self.port)
|
||||
session = async_get_clientsession(self.hass)
|
||||
bsblan = BSBLAN(config, session)
|
||||
try:
|
||||
device = await bsblan.device()
|
||||
except BSBLANError:
|
||||
# Device requires authentication - proceed to discovery confirm
|
||||
self.mac = None
|
||||
else:
|
||||
self.mac = device.MAC
|
||||
|
||||
# Got MAC without auth - set unique ID and check for existing device
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: self.host,
|
||||
CONF_PORT: self.port,
|
||||
}
|
||||
)
|
||||
# No auth needed, so we can proceed to a confirmation step without fields
|
||||
self._auth_required = False
|
||||
|
||||
# Proceed to get credentials
|
||||
self.context["title_placeholders"] = {"name": f"BSBLAN {self.host}"}
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle getting credentials for discovered device."""
|
||||
if user_input is None:
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_PASSKEY): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
if not self._auth_required:
|
||||
data_schema = vol.Schema({})
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=data_schema,
|
||||
description_placeholders={"host": str(self.host)},
|
||||
)
|
||||
|
||||
if not self._auth_required:
|
||||
return self._async_create_entry()
|
||||
|
||||
self.passkey = user_input.get(CONF_PASSKEY)
|
||||
self.username = user_input.get(CONF_USERNAME)
|
||||
self.password = user_input.get(CONF_PASSWORD)
|
||||
|
||||
return await self._validate_and_create(is_discovery=True)
|
||||
|
||||
async def _validate_and_create(
|
||||
self, is_discovery: bool = False
|
||||
) -> ConfigFlowResult:
|
||||
"""Validate device connection and create entry."""
|
||||
try:
|
||||
await self._get_bsblan_info(is_discovery=is_discovery)
|
||||
await self._get_bsblan_info()
|
||||
except BSBLANError:
|
||||
if is_discovery:
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_PASSKEY): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
errors={"base": "cannot_connect"},
|
||||
description_placeholders={"host": str(self.host)},
|
||||
)
|
||||
return self._show_setup_form({"base": "cannot_connect"})
|
||||
|
||||
return self._async_create_entry()
|
||||
@@ -173,7 +67,6 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@callback
|
||||
def _async_create_entry(self) -> ConfigFlowResult:
|
||||
"""Create the config entry."""
|
||||
return self.async_create_entry(
|
||||
title=format_mac(self.mac),
|
||||
data={
|
||||
@@ -185,10 +78,8 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
|
||||
async def _get_bsblan_info(
|
||||
self, raise_on_progress: bool = True, is_discovery: bool = False
|
||||
) -> None:
|
||||
"""Get device information from a BSBLAN device."""
|
||||
async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None:
|
||||
"""Get device information from an BSBLAN device."""
|
||||
config = BSBLANConfig(
|
||||
host=self.host,
|
||||
passkey=self.passkey,
|
||||
@@ -199,18 +90,11 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
session = async_get_clientsession(self.hass)
|
||||
bsblan = BSBLAN(config, session)
|
||||
device = await bsblan.device()
|
||||
retrieved_mac = device.MAC
|
||||
self.mac = device.MAC
|
||||
|
||||
# Handle unique ID assignment based on whether MAC was available from zeroconf
|
||||
if not self.mac:
|
||||
# MAC wasn't available from zeroconf, now we have it from API
|
||||
self.mac = retrieved_mac
|
||||
await self.async_set_unique_id(
|
||||
format_mac(self.mac), raise_on_progress=raise_on_progress
|
||||
)
|
||||
|
||||
# Always allow updating host/port for both user and discovery flows
|
||||
# This ensures connectivity is maintained when devices change IP addresses
|
||||
await self.async_set_unique_id(
|
||||
format_mac(self.mac), raise_on_progress=raise_on_progress
|
||||
)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: self.host,
|
||||
|
@@ -7,11 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"requirements": ["python-bsblan==2.1.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_http._tcp.local.",
|
||||
"name": "bsb-lan*"
|
||||
}
|
||||
]
|
||||
"requirements": ["python-bsblan==2.1.0"]
|
||||
}
|
||||
|
@@ -20,8 +20,6 @@ from . import BSBLanConfigEntry, BSBLanData
|
||||
from .coordinator import BSBLanCoordinatorData
|
||||
from .entity import BSBLanEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class BSBLanSensorEntityDescription(SensorEntityDescription):
|
||||
|
@@ -13,25 +13,7 @@
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your BSB-Lan device.",
|
||||
"port": "The port number of your BSB-Lan device.",
|
||||
"passkey": "The passkey for your BSB-Lan device.",
|
||||
"username": "The username for your BSB-Lan device.",
|
||||
"password": "The password for your BSB-Lan device."
|
||||
}
|
||||
},
|
||||
"discovery_confirm": {
|
||||
"title": "BSB-Lan device discovered",
|
||||
"description": "A BSB-Lan device was discovered at {host}. Please provide credentials if required.",
|
||||
"data": {
|
||||
"passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
|
||||
"username": "[%key:component::bsblan::config::step::user::data_description::username%]",
|
||||
"password": "[%key:component::bsblan::config::step::user::data_description::password%]"
|
||||
"host": "The hostname or IP address of your BSB-Lan device."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -45,7 +45,7 @@ class BTHomePassiveBluetoothProcessorCoordinator(
|
||||
@property
|
||||
def sleepy_device(self) -> bool:
|
||||
"""Return True if the device is a sleepy device."""
|
||||
return self.entry.data.get(CONF_SLEEPY_DEVICE, self.device_data.sleepy_device) # type: ignore[no-any-return]
|
||||
return self.entry.data.get(CONF_SLEEPY_DEVICE, self.device_data.sleepy_device)
|
||||
|
||||
|
||||
class BTHomePassiveBluetoothDataProcessor[_T](
|
||||
|
@@ -70,7 +70,7 @@ def get_event_classes_by_device_id(hass: HomeAssistant, device_id: str) -> list[
|
||||
bthome_config_entry = next(
|
||||
entry for entry in config_entries if entry and entry.domain == DOMAIN
|
||||
)
|
||||
return bthome_config_entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, []) # type: ignore[no-any-return]
|
||||
return bthome_config_entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, [])
|
||||
|
||||
|
||||
def get_event_types_by_event_class(event_class: str) -> set[str]:
|
||||
|
@@ -10,8 +10,14 @@ import random
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from hass_nabucasa import Cloud, CloudApiError, CloudApiNonRetryableError, CloudError
|
||||
from hass_nabucasa.files import FilesError, StorageType, StoredFile, calculate_b64md5
|
||||
from hass_nabucasa import Cloud, CloudError
|
||||
from hass_nabucasa.api import CloudApiError, CloudApiNonRetryableError
|
||||
from hass_nabucasa.cloud_api import (
|
||||
FilesHandlerListEntry,
|
||||
async_files_delete_file,
|
||||
async_files_list,
|
||||
)
|
||||
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
@@ -180,7 +186,8 @@ class CloudBackupAgent(BackupAgent):
|
||||
"""
|
||||
backup = await self._async_get_backup(backup_id)
|
||||
try:
|
||||
await self._cloud.files.delete(
|
||||
await async_files_delete_file(
|
||||
self._cloud,
|
||||
storage_type=StorageType.BACKUP,
|
||||
filename=backup["Key"],
|
||||
)
|
||||
@@ -192,10 +199,12 @@ class CloudBackupAgent(BackupAgent):
|
||||
backups = await self._async_list_backups()
|
||||
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
|
||||
|
||||
async def _async_list_backups(self) -> list[StoredFile]:
|
||||
async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
|
||||
"""List backups."""
|
||||
try:
|
||||
backups = await self._cloud.files.list(storage_type=StorageType.BACKUP)
|
||||
backups = await async_files_list(
|
||||
self._cloud, storage_type=StorageType.BACKUP
|
||||
)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to list backups") from err
|
||||
|
||||
@@ -211,7 +220,7 @@ class CloudBackupAgent(BackupAgent):
|
||||
backup = await self._async_get_backup(backup_id)
|
||||
return AgentBackup.from_dict(backup["Metadata"])
|
||||
|
||||
async def _async_get_backup(self, backup_id: str) -> StoredFile:
|
||||
async def _async_get_backup(self, backup_id: str) -> FilesHandlerListEntry:
|
||||
"""Return a backup."""
|
||||
backups = await self._async_list_backups()
|
||||
|
||||
|
@@ -40,11 +40,10 @@ from .prefs import CloudPreferences
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
VALID_REPAIR_TRANSLATION_KEYS = {
|
||||
"connection_error",
|
||||
"no_subscription",
|
||||
"warn_bad_custom_domain_configuration",
|
||||
"reset_bad_custom_domain_configuration",
|
||||
"subscription_expired",
|
||||
"warn_bad_custom_domain_configuration",
|
||||
}
|
||||
|
||||
|
||||
|
@@ -71,7 +71,7 @@ _CLOUD_ERRORS: dict[
|
||||
] = {
|
||||
TimeoutError: (
|
||||
HTTPStatus.BAD_GATEWAY,
|
||||
"Unable to reach the Home Assistant Cloud.",
|
||||
"Unable to reach the Home Assistant cloud.",
|
||||
),
|
||||
aiohttp.ClientError: (
|
||||
HTTPStatus.INTERNAL_SERVER_ERROR,
|
||||
|
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==0.108.0"],
|
||||
"requirements": ["hass-nabucasa==0.105.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff.