Compare commits


1 commit

Author: Paulus Schoutsen | SHA1: d184037f5a | Message: Add delete support to media source | Date: 2025-09-23 22:42:15 -04:00
340 changed files with 4856 additions and 21338 deletions


@@ -58,7 +58,6 @@ base_platforms: &base_platforms
# Extra components that trigger the full suite
components: &components
- homeassistant/components/alexa/**
- homeassistant/components/analytics/**
- homeassistant/components/application_credentials/**
- homeassistant/components/assist_pipeline/**
- homeassistant/components/auth/**


@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 8
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.11"
HA_SHORT_VERSION: "2025.10"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
key: >-
@@ -279,7 +279,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -309,7 +309,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
key: >-
@@ -513,7 +513,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -525,7 +525,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -570,7 +570,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
- base
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -741,7 +741,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: .mypy_cache
key: >-
@@ -935,7 +935,7 @@ jobs:
name: Split tests for full run
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true


@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
category: "/language:python"


@@ -160,7 +160,7 @@ jobs:
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.09.1
uses: home-assistant/wheels@2025.07.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -221,7 +221,7 @@ jobs:
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.09.1
uses: home-assistant/wheels@2025.07.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2


@@ -443,7 +443,6 @@ homeassistant.components.rituals_perfume_genie.*
homeassistant.components.roborock.*
homeassistant.components.roku.*
homeassistant.components.romy.*
homeassistant.components.route_b_smart_meter.*
homeassistant.components.rpi_power.*
homeassistant.components.rss_feed_template.*
homeassistant.components.russound_rio.*

CODEOWNERS (generated)

@@ -316,8 +316,6 @@ build.json @home-assistant/supervisor
/tests/components/crownstone/ @Crownstone @RicArch97
/homeassistant/components/cups/ @fabaff
/tests/components/cups/ @fabaff
/homeassistant/components/cync/ @Kinachi249
/tests/components/cync/ @Kinachi249
/homeassistant/components/daikin/ @fredrike
/tests/components/daikin/ @fredrike
/homeassistant/components/date/ @home-assistant/core
@@ -412,8 +410,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/egardia/ @jeroenterheerdt
/homeassistant/components/eheimdigital/ @autinerd
/tests/components/eheimdigital/ @autinerd
/homeassistant/components/ekeybionyx/ @richardpolzer
/tests/components/ekeybionyx/ @richardpolzer
/homeassistant/components/electrasmart/ @jafar-atili
/tests/components/electrasmart/ @jafar-atili
/homeassistant/components/electric_kiwi/ @mikey0000
@@ -976,6 +972,8 @@ build.json @home-assistant/supervisor
/tests/components/moat/ @bdraco
/homeassistant/components/mobile_app/ @home-assistant/core
/tests/components/mobile_app/ @home-assistant/core
/homeassistant/components/modbus/ @janiversen
/tests/components/modbus/ @janiversen
/homeassistant/components/modem_callerid/ @tkdrob
/tests/components/modem_callerid/ @tkdrob
/homeassistant/components/modern_forms/ @wonderslug
@@ -1334,8 +1332,6 @@ build.json @home-assistant/supervisor
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
/homeassistant/components/roon/ @pavoni
/tests/components/roon/ @pavoni
/homeassistant/components/route_b_smart_meter/ @SeraphicRav
/tests/components/route_b_smart_meter/ @SeraphicRav
/homeassistant/components/rpi_power/ @shenxn @swetoast
/tests/components/rpi_power/ @shenxn @swetoast
/homeassistant/components/rss_feed_template/ @home-assistant/core


@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@@ -4,9 +4,11 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import cast
from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
from bleak import BleakScanner
from homeassistant.components.bluetooth import async_get_scanner
from homeassistant.config_entries import ConfigEntry
@@ -43,7 +45,7 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
name=entry.title,
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
notify_callback=self.async_update_listeners,
scanner=async_get_scanner(hass),
scanner=cast(BleakScanner, async_get_scanner(hass)),
)
@property


@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.2"]
"requirements": ["accuweather==4.2.1"]
}


@@ -4,18 +4,10 @@ from __future__ import annotations
from airos.airos8 import AirOS8
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
_PLATFORMS: list[Platform] = [
@@ -29,16 +21,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
)
session = async_get_clientsession(hass, verify_ssl=False)
airos_device = AirOS8(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
@@ -51,30 +40,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
return True
async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Migrate old config entry."""
if entry.version > 1:
# This means the user has downgraded from a future version
return False
if entry.version == 1 and entry.minor_version == 1:
new_data = {**entry.data}
advanced_data = {
CONF_SSL: DEFAULT_SSL,
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
}
new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
hass.config_entries.async_update_entry(
entry,
data=new_data,
minor_version=2,
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)


@@ -15,17 +15,10 @@ from airos.exceptions import (
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.data_entry_flow import section
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .const import DOMAIN
from .coordinator import AirOS8
_LOGGER = logging.getLogger(__name__)
@@ -35,15 +28,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME, default="ubnt"): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
}
),
{"collapsed": True},
),
}
)
@@ -52,7 +36,6 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ubiquiti airOS."""
VERSION = 1
MINOR_VERSION = 2
async def async_step_user(
self,
@@ -63,17 +46,13 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
session = async_get_clientsession(self.hass, verify_ssl=False)
airos_device = AirOS8(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()


@@ -7,8 +7,3 @@ DOMAIN = "airos"
SCAN_INTERVAL = timedelta(minutes=1)
MANUFACTURER = "Ubiquiti"
DEFAULT_VERIFY_SSL = False
DEFAULT_SSL = True
SECTION_ADVANCED_SETTINGS = "advanced_settings"


@@ -2,11 +2,11 @@
from __future__ import annotations
from homeassistant.const import CONF_HOST, CONF_SSL
from homeassistant.const import CONF_HOST
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
from .const import DOMAIN, MANUFACTURER
from .coordinator import AirOSDataUpdateCoordinator
@@ -20,14 +20,9 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
super().__init__(coordinator)
airos_data = self.coordinator.data
url_schema = (
"https"
if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
else "http"
)
configuration_url: str | None = (
f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
f"https://{coordinator.config_entry.data[CONF_HOST]}"
)
self._attr_device_info = DeviceInfo(


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.3"]
"requirements": ["airos==0.5.1"]
}


@@ -12,18 +12,6 @@
"host": "IP address or hostname of the airOS device",
"username": "Administrator username for the airOS device, normally 'ubnt'",
"password": "Password configured through the UISP app or web interface"
},
"sections": {
"advanced_settings": {
"data": {
"ssl": "Use HTTPS",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"ssl": "Whether the connection should be encrypted (required for most devices)",
"verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
}
}
}
}
},


@@ -6,19 +6,17 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, Final
from aioairzone.common import GrilleAngle, OperationMode, QAdapt, SleepTimeout
from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout
from aioairzone.const import (
API_COLD_ANGLE,
API_HEAT_ANGLE,
API_MODE,
API_Q_ADAPT,
API_SLEEP,
AZD_COLD_ANGLE,
AZD_HEAT_ANGLE,
AZD_MASTER,
AZD_MODE,
AZD_MODES,
AZD_Q_ADAPT,
AZD_SLEEP,
AZD_ZONES,
)
@@ -67,14 +65,6 @@ SLEEP_DICT: Final[dict[str, int]] = {
"90m": SleepTimeout.SLEEP_90,
}
Q_ADAPT_DICT: Final[dict[str, int]] = {
"standard": QAdapt.STANDARD,
"power": QAdapt.POWER,
"silence": QAdapt.SILENCE,
"minimum": QAdapt.MINIMUM,
"maximum": QAdapt.MAXIMUM,
}
def main_zone_options(
zone_data: dict[str, Any],
@@ -93,14 +83,6 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
options_fn=main_zone_options,
translation_key="modes",
),
AirzoneSelectDescription(
api_param=API_Q_ADAPT,
entity_category=EntityCategory.CONFIG,
key=AZD_Q_ADAPT,
options=list(Q_ADAPT_DICT),
options_dict=Q_ADAPT_DICT,
translation_key="q_adapt",
),
)


@@ -63,16 +63,6 @@
"stop": "Stop"
}
},
"q_adapt": {
"name": "Q-Adapt",
"state": {
"standard": "Standard",
"power": "Power",
"silence": "Silence",
"minimum": "Minimum",
"maximum": "Maximum"
}
},
"sleep_times": {
"name": "Sleep",
"state": {


@@ -10,7 +10,6 @@ from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
@@ -21,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -33,7 +31,6 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
is_on_fn: Callable[[AmazonDevice, str], bool]
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True
BINARY_SENSORS: Final = (
@@ -44,15 +41,46 @@ BINARY_SENSORS: Final = (
is_on_fn=lambda device, _: device.online,
),
AmazonBinarySensorEntityDescription(
key="detectionState",
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda device, key: bool(
device.sensors[key].value != SENSOR_STATE_OFF
),
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, _: device.bluetooth_state,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="humanPresenceDetectionState",
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online and device.sensors[key].error is False
),
),
)
@@ -66,34 +94,13 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
coordinator,
BINARY_SENSOR_DOMAIN,
"humanPresenceDetectionState",
"detectionState",
async_add_entities(
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in BINARY_SENSORS
for serial_num in coordinator.data
if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
)
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in BINARY_SENSORS
for serial_num in new_devices
if sensor_desc.is_supported(
coordinator.data[serial_num], sensor_desc.key
)
)
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
"""Binary sensor device."""
@@ -106,13 +113,3 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
return self.entity_description.is_on_fn(
self.device, self.entity_description.key
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)


@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except CannotAuthenticate:
except (CannotAuthenticate, TypeError):
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
)
except CannotConnect:
errors["base"] = "cannot_connect"
except CannotAuthenticate:
except (CannotAuthenticate, TypeError):
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"


@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
except CannotAuthenticate as err:
except (CannotAuthenticate, TypeError) as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",


@@ -60,5 +60,7 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"sensors": device.sensors,
"do not disturb": device.do_not_disturb,
"response style": device.response_style,
"bluetooth state": device.bluetooth_state,
}


@@ -1,4 +1,44 @@
{
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth-off",
"state": {
"on": "mdi:bluetooth"
}
},
"baby_cry_detection": {
"default": "mdi:account-voice-off",
"state": {
"on": "mdi:account-voice"
}
},
"beeping_appliance_detection": {
"default": "mdi:bell-off",
"state": {
"on": "mdi:bell-ring"
}
},
"cough_detection": {
"default": "mdi:blur-off",
"state": {
"on": "mdi:blur"
}
},
"dog_bark_detection": {
"default": "mdi:dog-side-off",
"state": {
"on": "mdi:dog-side"
}
},
"water_sounds_detection": {
"default": "mdi:water-pump-off",
"state": {
"on": "mdi:water-pump"
}
}
}
},
"services": {
"send_sound": {
"service": "mdi:cast-audio"


@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.6"]
"quality_scale": "silver",
"requirements": ["aioamazondevices==6.0.0"]
}


@@ -57,23 +57,13 @@ async def async_setup_entry(
coordinator = entry.runtime_data
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in NOTIFY
for serial_num in new_devices
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
and sensor_desc.is_supported(coordinator.data[serial_num])
)
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
async_add_entities(
AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in NOTIFY
for serial_num in coordinator.data
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
and sensor_desc.is_supported(coordinator.data[serial_num])
)
class AmazonNotifyEntity(AmazonEntity, NotifyEntity):


@@ -53,7 +53,7 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: done
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done


@@ -31,9 +31,6 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
"""Amazon Devices sensor entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
SENSORS: Final = (
@@ -65,22 +62,12 @@ async def async_setup_entry(
coordinator = entry.runtime_data
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
AmazonSensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in SENSORS
for serial_num in new_devices
if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
)
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
async_add_entities(
AmazonSensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in SENSORS
for serial_num in coordinator.data
if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
)
class AmazonSensorEntity(AmazonEntity, SensorEntity):
@@ -102,13 +89,3 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.device.sensors[self.entity_description.key].value
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)


@@ -58,6 +58,26 @@
}
},
"entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
},
"baby_cry_detection": {
"name": "Baby crying"
},
"beeping_appliance_detection": {
"name": "Beeping appliance"
},
"cough_detection": {
"name": "Coughing"
},
"dog_bark_detection": {
"name": "Dog barking"
},
"water_sounds_detection": {
"name": "Water sounds"
}
},
"notify": {
"speak": {
"name": "Speak"


@@ -8,17 +8,13 @@ from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
SwitchEntity,
SwitchEntityDescription,
)
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call, async_update_unique_id
from .utils import alexa_api_call
PARALLEL_UPDATES = 1
@@ -28,17 +24,16 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
"""Alexa Devices switch entity description."""
is_on_fn: Callable[[AmazonDevice], bool]
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
subkey: str
method: str
SWITCHES: Final = (
AmazonSwitchEntityDescription(
key="dnd",
key="do_not_disturb",
subkey="AUDIO_PLAYER",
translation_key="do_not_disturb",
is_on_fn=lambda device: bool(device.sensors["dnd"].value),
is_on_fn=lambda _device: _device.do_not_disturb,
method="set_do_not_disturb",
),
)
@@ -53,28 +48,13 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "DND" switch and remove from Speaker Group
await async_update_unique_id(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
async_add_entities(
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES
for serial_num in coordinator.data
if switch_desc.subkey in coordinator.data[serial_num].capabilities
)
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES
for serial_num in new_devices
if switch_desc.key in coordinator.data[serial_num].sensors
)
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
"""Switch device."""
@@ -104,13 +84,3 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
def is_on(self) -> bool:
"""Return True if switch is on."""
return self.entity_description.is_on_fn(self.device)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)


@@ -6,12 +6,9 @@ from typing import Any, Concatenate
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
from .const import _LOGGER, DOMAIN
from .coordinator import AmazonDevicesCoordinator
from .const import DOMAIN
from .entity import AmazonEntity
@@ -41,23 +38,3 @@ def alexa_api_call[_T: AmazonEntity, **_P](
) from err
return cmd_wrapper
async def async_update_unique_id(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
domain: str,
old_key: str,
new_key: str,
) -> None:
"""Update unique id for entities created with old format."""
entity_registry = er.async_get(hass)
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{old_key}"
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
_LOGGER.debug("Updating unique_id for %s", entity_id)
new_unique_id = unique_id.replace(old_key, new_key)
# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)


@@ -39,7 +39,7 @@ from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.typing import UNDEFINED
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.loader import (
Integration,
IntegrationNotFound,
@@ -142,6 +142,7 @@ class EntityAnalyticsModifications:
"""
remove: bool = False
capabilities: dict[str, Any] | None | UndefinedType = UNDEFINED
class AnalyticsPlatformProtocol(Protocol):
@@ -505,7 +506,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
async def async_devices_payload(hass: HomeAssistant) -> dict:
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)
@@ -537,23 +538,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
integration_input[1].append(entity_entry.entity_id)
integrations = {
domain: integration
for domain, integration in (
await async_get_integrations(hass, integration_inputs.keys())
).items()
if isinstance(integration, Integration)
}
# Filter out custom integrations and integrations that are not device or hub type
integration_inputs = {
domain: integration_info
for domain, integration_info in integration_inputs.items()
if (integration := integrations.get(domain)) is not None
and integration.is_built_in
and integration.integration_type in ("device", "hub")
}
# Call integrations that implement the analytics platform
for integration_domain, integration_input in integration_inputs.items():
if (
@@ -676,14 +660,18 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
# we should replace it with the original value in the future.
# It is also not present, if entity is not in the state machine,
# which can happen for disabled entities.
"assumed_state": (
entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
if entity_state is not None
else None
),
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
if entity_state is not None
else None,
"capabilities": entity_config.capabilities
if entity_config.capabilities is not UNDEFINED
else entity_entry.capabilities,
"domain": entity_entry.domain,
"entity_category": entity_entry.entity_category,
"has_entity_name": entity_entry.has_entity_name,
"modified_by_integration": ["capabilities"]
if entity_config.capabilities is not UNDEFINED
else None,
"original_device_class": entity_entry.original_device_class,
# LIMITATION: `unit_of_measurement` can be overridden by users;
# we should replace it with the original value in the future.
@@ -700,6 +688,23 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
else:
entities_info.append(entity_info)
integrations = {
domain: integration
for domain, integration in (
await async_get_integrations(hass, integrations_info.keys())
).items()
if isinstance(integration, Integration)
}
for domain, integration_info in integrations_info.items():
if integration := integrations.get(domain):
integration_info["is_custom_integration"] = not integration.is_built_in
# Include version for custom integrations
if not integration.is_built_in and integration.version:
integration_info["custom_integration_version"] = str(
integration.version
)
return {
"version": "home-assistant:1",
"home_assistant": HA_VERSION,


@@ -1308,9 +1308,7 @@ class PipelineRun:
# instead of a full response.
all_targets_in_satellite_area = (
self._get_all_targets_in_satellite_area(
conversation_result.response,
self._satellite_id,
self._device_id,
conversation_result.response, self._device_id
)
)
@@ -1339,62 +1337,39 @@ class PipelineRun:
return (speech, all_targets_in_satellite_area)
def _get_all_targets_in_satellite_area(
self,
intent_response: intent.IntentResponse,
satellite_id: str | None,
device_id: str | None,
self, intent_response: intent.IntentResponse, device_id: str | None
) -> bool:
"""Return true if all targeted entities were in the same area as the device."""
if (
intent_response.response_type != intent.IntentResponseType.ACTION_DONE
or not intent_response.matched_states
(intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
or (not intent_response.matched_states)
or (not device_id)
):
return False
device_registry = dr.async_get(self.hass)
if (not (device := device_registry.async_get(device_id))) or (
not device.area_id
):
return False
entity_registry = er.async_get(self.hass)
device_registry = dr.async_get(self.hass)
area_id: str | None = None
if (
satellite_id is not None
and (target_entity_entry := entity_registry.async_get(satellite_id))
is not None
):
area_id = target_entity_entry.area_id
device_id = target_entity_entry.device_id
if area_id is None:
if device_id is None:
return False
device_entry = device_registry.async_get(device_id)
if device_entry is None:
return False
area_id = device_entry.area_id
if area_id is None:
return False
for state in intent_response.matched_states:
target_entity_entry = entity_registry.async_get(state.entity_id)
if target_entity_entry is None:
entity = entity_registry.async_get(state.entity_id)
if not entity:
return False
target_area_id = target_entity_entry.area_id
if target_area_id is None:
if target_entity_entry.device_id is None:
if (entity_area_id := entity.area_id) is None:
if (entity.device_id is None) or (
(entity_device := device_registry.async_get(entity.device_id))
is None
):
return False
target_device_entry = device_registry.async_get(
target_entity_entry.device_id
)
if target_device_entry is None:
return False
entity_area_id = entity_device.area_id
target_area_id = target_device_entry.area_id
if target_area_id != area_id:
if entity_area_id != device.area_id:
return False
return True


@@ -0,0 +1,24 @@
"""Analytics platform."""
from homeassistant.components.analytics import (
AnalyticsInput,
AnalyticsModifications,
EntityAnalyticsModifications,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
ent_reg = er.async_get(hass)
entities: dict[str, EntityAnalyticsModifications] = {}
for entity_id in analytics_input.entity_ids:
entity_entry = ent_reg.entities[entity_id]
if entity_entry.capabilities is not None:
entities[entity_id] = EntityAnalyticsModifications(capabilities=None)
return AnalyticsModifications(entities=entities)


@@ -3,12 +3,16 @@ beolink_allstandby:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
@@ -33,6 +37,8 @@ beolink_join:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
@@ -65,12 +71,16 @@ beolink_leave:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false


@@ -13,30 +13,20 @@ from bluecurrent_api.exceptions import (
RequestLimitReached,
WebsocketError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_API_TOKEN, CONF_DEVICE_ID, Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
ServiceValidationError,
)
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType
from .const import (
BCU_APP,
CHARGEPOINT_SETTINGS,
CHARGEPOINT_STATUS,
CHARGING_CARD_ID,
DOMAIN,
EVSE_ID,
LOGGER,
PLUG_AND_CHARGE,
SERVICE_START_CHARGE_SESSION,
VALUE,
)
@@ -44,7 +34,6 @@ type BlueCurrentConfigEntry = ConfigEntry[Connector]
PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH]
CHARGE_POINTS = "CHARGE_POINTS"
CHARGE_CARDS = "CHARGE_CARDS"
DATA = "data"
DELAY = 5
@@ -52,16 +41,6 @@ GRID = "GRID"
OBJECT = "object"
VALUE_TYPES = [CHARGEPOINT_STATUS, CHARGEPOINT_SETTINGS]
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
SERVICE_START_CHARGE_SESSION_SCHEMA = vol.Schema(
{
vol.Required(CONF_DEVICE_ID): cv.string,
# When no charging card is provided, use no charging card (BCU_APP = no charging card).
vol.Optional(CHARGING_CARD_ID, default=BCU_APP): cv.string,
}
)
async def async_setup_entry(
hass: HomeAssistant, config_entry: BlueCurrentConfigEntry
@@ -88,66 +67,6 @@ async def async_setup_entry(
return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Blue Current."""
async def start_charge_session(service_call: ServiceCall) -> None:
"""Start a charge session with the provided device and charge card ID."""
# When no charge card is provided, use the default charge card set in the config flow.
charging_card_id = service_call.data[CHARGING_CARD_ID]
device_id = service_call.data[CONF_DEVICE_ID]
# Get the device based on the given device ID.
device = dr.async_get(hass).devices.get(device_id)
if device is None:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="invalid_device_id"
)
blue_current_config_entry: ConfigEntry | None = None
for config_entry_id in device.config_entries:
config_entry = hass.config_entries.async_get_entry(config_entry_id)
if not config_entry or config_entry.domain != DOMAIN:
# Not the blue_current config entry.
continue
if config_entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="config_entry_not_loaded"
)
blue_current_config_entry = config_entry
break
if not blue_current_config_entry:
# The device is not connected to a valid blue_current config entry.
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="no_config_entry"
)
connector = blue_current_config_entry.runtime_data
# Get the evse_id from the identifier of the device.
evse_id = next(
identifier[1]
for identifier in device.identifiers
if identifier[0] == DOMAIN
)
await connector.client.start_session(evse_id, charging_card_id)
hass.services.async_register(
DOMAIN,
SERVICE_START_CHARGE_SESSION,
start_charge_session,
SERVICE_START_CHARGE_SESSION_SCHEMA,
)
return True
async def async_unload_entry(
hass: HomeAssistant, config_entry: BlueCurrentConfigEntry
) -> bool:
@@ -168,7 +87,6 @@ class Connector:
self.client = client
self.charge_points: dict[str, dict] = {}
self.grid: dict[str, Any] = {}
self.charge_cards: dict[str, dict[str, Any]] = {}
async def on_data(self, message: dict) -> None:
"""Handle received data."""


@@ -8,12 +8,6 @@ LOGGER = logging.getLogger(__package__)
EVSE_ID = "evse_id"
MODEL_TYPE = "model_type"
CARD = "card"
UID = "uid"
BCU_APP = "BCU-APP"
WITHOUT_CHARGING_CARD = "without_charging_card"
CHARGING_CARD_ID = "charging_card_id"
SERVICE_START_CHARGE_SESSION = "start_charge_session"
PLUG_AND_CHARGE = "plug_and_charge"
VALUE = "value"
PERMISSION = "permission"


@@ -42,10 +42,5 @@
"default": "mdi:lock"
}
}
},
"services": {
"start_charge_session": {
"service": "mdi:play"
}
}
}


@@ -1,12 +0,0 @@
start_charge_session:
fields:
device_id:
selector:
device:
integration: blue_current
required: true
charging_card_id:
selector:
text:
required: false


@@ -22,16 +22,6 @@
"wrong_account": "Wrong account: Please authenticate with the API token for {email}."
}
},
"options": {
"step": {
"init": {
"data": {
"card": "Card"
},
"description": "Select the default charging card you want to use"
}
}
},
"entity": {
"sensor": {
"activity": {
@@ -146,39 +136,5 @@
"name": "Block charge point"
}
}
},
"selector": {
"select_charging_card": {
"options": {
"without_charging_card": "Without charging card"
}
}
},
"services": {
"start_charge_session": {
"name": "Start charge session",
"description": "Starts a new charge session on a specified charge point.",
"fields": {
"charging_card_id": {
"name": "Charging card ID",
"description": "Optional charging card ID that will be used to start a charge session. When not provided, no charging card will be used."
},
"device_id": {
"name": "Device ID",
"description": "The ID of the Blue Current charge point."
}
}
}
},
"exceptions": {
"invalid_device_id": {
"message": "Invalid device ID given."
},
"config_entry_not_loaded": {
"message": "Config entry not loaded."
},
"no_config_entry": {
"message": "Device has not a valid blue_current config entry."
}
}
}


@@ -10,7 +10,6 @@ from asyncio import Future
from collections.abc import Callable, Iterable
from typing import TYPE_CHECKING, cast
from bleak import BleakScanner
from habluetooth import (
BaseHaScanner,
BluetoothScannerDevice,
@@ -39,16 +38,13 @@ def _get_manager(hass: HomeAssistant) -> HomeAssistantBluetoothManager:
@hass_callback
def async_get_scanner(hass: HomeAssistant) -> BleakScanner:
"""Return a HaBleakScannerWrapper cast to BleakScanner.
def async_get_scanner(hass: HomeAssistant) -> HaBleakScannerWrapper:
"""Return a HaBleakScannerWrapper.
This is a wrapper around our BleakScanner singleton that allows
multiple integrations to share the same BleakScanner.
The wrapper is cast to BleakScanner for type compatibility with
libraries expecting a BleakScanner instance.
"""
return cast(BleakScanner, HaBleakScannerWrapper())
return HaBleakScannerWrapper()
@hass_callback


@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.2"],
"requirements": ["hass-nabucasa==1.1.1"],
"single_config_entry": true
}


@@ -29,23 +29,10 @@ async def async_setup_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoBinarySensorEntity(
coordinator, device, config_entry.entry_id
)
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
async_add_entities(
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data["alarm_zones"].values()
)
class ComelitVedoBinarySensorEntity(


@@ -25,27 +25,23 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111"
DEFAULT_PIN = 111111
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
}
)


@@ -29,21 +29,10 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data[COVER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[COVER].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
async_add_entities(
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[COVER].values()
)
class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):


@@ -27,21 +27,10 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data[LIGHT])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[LIGHT].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
async_add_entities(
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[LIGHT].values()
)
class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):


@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "platinum",
"quality_scale": "silver",
"requirements": ["aiocomelit==0.12.3"]
}


@@ -57,7 +57,9 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: done
dynamic-devices:
status: todo
comment: missing implementation
entity-category:
status: exempt
comment: no config or diagnostic entities


@@ -4,7 +4,7 @@ from __future__ import annotations
from typing import Final, cast
from aiocomelit.api import ComelitSerialBridgeObject, ComelitVedoZoneObject
from aiocomelit import ComelitSerialBridgeObject, ComelitVedoZoneObject
from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState
from homeassistant.components.sensor import (
@@ -65,24 +65,15 @@ async def async_setup_bridge_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data[OTHER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_BRIDGE_TYPES
for device in coordinator.data[OTHER].values()
if device.index in new_devices
entities: list[ComelitBridgeSensorEntity] = []
for device in coordinator.data[OTHER].values():
entities.extend(
ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
for sensor_desc in SENSOR_BRIDGE_TYPES
)
async_add_entities(entities)
async def async_setup_vedo_entry(
@@ -94,24 +85,15 @@ async def async_setup_vedo_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_VEDO_TYPES
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
entities: list[ComelitVedoSensorEntity] = []
for device in coordinator.data["alarm_zones"].values():
entities.extend(
ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
for sensor_desc in SENSOR_VEDO_TYPES
)
async_add_entities(entities)
class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):


@@ -39,25 +39,6 @@ async def async_setup_entry(
)
async_add_entities(entities)
known_devices: dict[str, set[int]] = {
dev_type: set() for dev_type in (IRRIGATION, OTHER)
}
def _check_device() -> None:
for dev_type in (IRRIGATION, OTHER):
current_devices = set(coordinator.data[dev_type])
new_devices = current_devices - known_devices[dev_type]
if new_devices:
known_devices[dev_type].update(new_devices)
async_add_entities(
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):
"""Switch device."""

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.3"]
}

View File

@@ -1,58 +0,0 @@
"""The Cync integration."""
from __future__ import annotations
from pycync import Auth, Cync, User
from pycync.exceptions import AuthFailedError, CyncError
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_AUTHORIZE_STRING,
CONF_EXPIRES_AT,
CONF_REFRESH_TOKEN,
CONF_USER_ID,
)
from .coordinator import CyncConfigEntry, CyncCoordinator
_PLATFORMS: list[Platform] = [Platform.LIGHT]
async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool:
"""Set up Cync from a config entry."""
user_info = User(
entry.data[CONF_ACCESS_TOKEN],
entry.data[CONF_REFRESH_TOKEN],
entry.data[CONF_AUTHORIZE_STRING],
entry.data[CONF_USER_ID],
expires_at=entry.data[CONF_EXPIRES_AT],
)
cync_auth = Auth(async_get_clientsession(hass), user=user_info)
try:
cync = await Cync.create(cync_auth)
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("User token invalid") from ex
except CyncError as ex:
raise ConfigEntryNotReady("Unable to connect to Cync") from ex
devices_coordinator = CyncCoordinator(hass, entry, cync)
cync.set_update_callback(devices_coordinator.on_data_update)
await devices_coordinator.async_config_entry_first_refresh()
entry.runtime_data = devices_coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool:
"""Unload a config entry."""
cync = entry.runtime_data.cync
await cync.shut_down()
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -1,118 +0,0 @@
"""Config flow for the Cync integration."""
from __future__ import annotations
import logging
from typing import Any
from pycync import Auth
from pycync.exceptions import AuthFailedError, CyncError, TwoFactorRequiredError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_AUTHORIZE_STRING,
CONF_EXPIRES_AT,
CONF_REFRESH_TOKEN,
CONF_TWO_FACTOR_CODE,
CONF_USER_ID,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
}
)
STEP_TWO_FACTOR_SCHEMA = vol.Schema({vol.Required(CONF_TWO_FACTOR_CODE): str})
class CyncConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Cync."""
VERSION = 1
cync_auth: Auth
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Attempt login with user credentials."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
self.cync_auth = Auth(
async_get_clientsession(self.hass),
username=user_input[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
)
try:
await self.cync_auth.login()
except AuthFailedError:
errors["base"] = "invalid_auth"
except TwoFactorRequiredError:
return await self.async_step_two_factor()
except CyncError:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return await self._create_config_entry(self.cync_auth.username)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_two_factor(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Attempt login with the two factor auth code sent to the user."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="two_factor", data_schema=STEP_TWO_FACTOR_SCHEMA, errors=errors
)
try:
await self.cync_auth.login(user_input[CONF_TWO_FACTOR_CODE])
except AuthFailedError:
errors["base"] = "invalid_auth"
except CyncError:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return await self._create_config_entry(self.cync_auth.username)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def _create_config_entry(self, user_email: str) -> ConfigFlowResult:
"""Create the Cync config entry using input user data."""
cync_user = self.cync_auth.user
await self.async_set_unique_id(str(cync_user.user_id))
self._abort_if_unique_id_configured()
config = {
CONF_USER_ID: cync_user.user_id,
CONF_AUTHORIZE_STRING: cync_user.authorize,
CONF_EXPIRES_AT: cync_user.expires_at,
CONF_ACCESS_TOKEN: cync_user.access_token,
CONF_REFRESH_TOKEN: cync_user.refresh_token,
}
return self.async_create_entry(title=user_email, data=config)

View File

@@ -1,9 +0,0 @@
"""Constants for the Cync integration."""
DOMAIN = "cync"
CONF_TWO_FACTOR_CODE = "two_factor_code"
CONF_USER_ID = "user_id"
CONF_AUTHORIZE_STRING = "authorize_string"
CONF_EXPIRES_AT = "expires_at"
CONF_REFRESH_TOKEN = "refresh_token"

View File

@@ -1,87 +0,0 @@
"""Coordinator to handle keeping device states up to date."""
from __future__ import annotations
from datetime import timedelta
import logging
import time
from pycync import Cync, CyncDevice, User
from pycync.exceptions import AuthFailedError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_EXPIRES_AT, CONF_REFRESH_TOKEN
_LOGGER = logging.getLogger(__name__)
type CyncConfigEntry = ConfigEntry[CyncCoordinator]
class CyncCoordinator(DataUpdateCoordinator[dict[int, CyncDevice]]):
"""Coordinator to handle updating Cync device states."""
config_entry: CyncConfigEntry
def __init__(
self, hass: HomeAssistant, config_entry: CyncConfigEntry, cync: Cync
) -> None:
"""Initialize the Cync coordinator."""
super().__init__(
hass,
_LOGGER,
name="Cync Data Coordinator",
config_entry=config_entry,
update_interval=timedelta(seconds=30),
always_update=True,
)
self.cync = cync
async def on_data_update(self, data: dict[int, CyncDevice]) -> None:
"""Update registered devices with new data."""
merged_data = self.data | data if self.data else data
self.async_set_updated_data(merged_data)
async def _async_setup(self) -> None:
"""Set up the coordinator with initial device states."""
logged_in_user = self.cync.get_logged_in_user()
if logged_in_user.access_token != self.config_entry.data[CONF_ACCESS_TOKEN]:
await self._update_config_cync_credentials(logged_in_user)
async def _async_update_data(self) -> dict[int, CyncDevice]:
"""First, refresh the user's auth token if it is set to expire in less than one hour.
Then, fetch all current device states.
"""
logged_in_user = self.cync.get_logged_in_user()
if logged_in_user.expires_at - time.time() < 3600:
await self._async_refresh_cync_credentials()
self.cync.update_device_states()
current_device_states = self.cync.get_devices()
return {device.device_id: device for device in current_device_states}
async def _async_refresh_cync_credentials(self) -> None:
"""Attempt to refresh the Cync user's authentication token."""
try:
refreshed_user = await self.cync.refresh_credentials()
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("Unable to refresh user token") from ex
else:
await self._update_config_cync_credentials(refreshed_user)
async def _update_config_cync_credentials(self, user_info: User) -> None:
"""Update the config entry with current user info."""
new_data = {**self.config_entry.data}
new_data[CONF_ACCESS_TOKEN] = user_info.access_token
new_data[CONF_REFRESH_TOKEN] = user_info.refresh_token
new_data[CONF_EXPIRES_AT] = user_info.expires_at
self.hass.config_entries.async_update_entry(self.config_entry, data=new_data)

View File

@@ -1,45 +0,0 @@
"""Setup for a generic entity type for the Cync integration."""
from pycync.devices import CyncDevice
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import CyncCoordinator
class CyncBaseEntity(CoordinatorEntity[CyncCoordinator]):
"""Generic base entity for Cync devices."""
_attr_has_entity_name = True
def __init__(
self,
device: CyncDevice,
coordinator: CyncCoordinator,
room_name: str | None = None,
) -> None:
"""Pass coordinator to CoordinatorEntity."""
super().__init__(coordinator)
self._cync_device_id = device.device_id
self._attr_unique_id = device.unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.unique_id)},
manufacturer="GE Lighting",
name=device.name,
suggested_area=room_name,
)
@property
def available(self) -> bool:
"""Determines whether this device is currently available."""
return (
super().available
and self.coordinator.data is not None
and self._cync_device_id in self.coordinator.data
and self.coordinator.data[self._cync_device_id].is_online
)

View File

@@ -1,180 +0,0 @@
"""Support for Cync light entities."""
from typing import Any
from pycync import CyncLight
from pycync.devices.capabilities import CyncCapability
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP_KELVIN,
ATTR_RGB_COLOR,
ColorMode,
LightEntity,
filter_supported_color_modes,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.color import value_to_brightness
from homeassistant.util.scaling import scale_ranged_value_to_int_range
from .coordinator import CyncConfigEntry, CyncCoordinator
from .entity import CyncBaseEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: CyncConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Cync lights from a config entry."""
coordinator = entry.runtime_data
cync = coordinator.cync
entities_to_add = []
for home in cync.get_homes():
for room in home.rooms:
room_lights = [
CyncLightEntity(device, coordinator, room.name)
for device in room.devices
if isinstance(device, CyncLight)
]
entities_to_add.extend(room_lights)
group_lights = [
CyncLightEntity(device, coordinator, room.name)
for group in room.groups
for device in group.devices
if isinstance(device, CyncLight)
]
entities_to_add.extend(group_lights)
async_add_entities(entities_to_add)
class CyncLightEntity(CyncBaseEntity, LightEntity):
"""Representation of a Cync light."""
_attr_color_mode = ColorMode.ONOFF
_attr_min_color_temp_kelvin = 2000
_attr_max_color_temp_kelvin = 7000
_attr_translation_key = "light"
_attr_name = None
BRIGHTNESS_SCALE = (0, 100)
def __init__(
self,
device: CyncLight,
coordinator: CyncCoordinator,
room_name: str | None = None,
) -> None:
"""Set up base attributes."""
super().__init__(device, coordinator, room_name)
supported_color_modes = {ColorMode.ONOFF}
if device.supports_capability(CyncCapability.CCT_COLOR):
supported_color_modes.add(ColorMode.COLOR_TEMP)
if device.supports_capability(CyncCapability.DIMMING):
supported_color_modes.add(ColorMode.BRIGHTNESS)
if device.supports_capability(CyncCapability.RGB_COLOR):
supported_color_modes.add(ColorMode.RGB)
self._attr_supported_color_modes = filter_supported_color_modes(
supported_color_modes
)
@property
def is_on(self) -> bool | None:
"""Return True if the light is on."""
return self._device.is_on
@property
def brightness(self) -> int:
"""Provide the light's current brightness."""
return value_to_brightness(self.BRIGHTNESS_SCALE, self._device.brightness)
@property
def color_temp_kelvin(self) -> int:
"""Return color temperature in kelvin."""
return scale_ranged_value_to_int_range(
(1, 100),
(self.min_color_temp_kelvin, self.max_color_temp_kelvin),
self._device.color_temp,
)
@property
def rgb_color(self) -> tuple[int, int, int]:
"""Provide the light's current color in RGB format."""
return self._device.rgb
@property
def color_mode(self) -> str | None:
"""Return the active color mode."""
if (
self._device.supports_capability(CyncCapability.CCT_COLOR)
and self._device.color_mode > 0
and self._device.color_mode <= 100
):
return ColorMode.COLOR_TEMP
if (
self._device.supports_capability(CyncCapability.RGB_COLOR)
and self._device.color_mode == 254
):
return ColorMode.RGB
if self._device.supports_capability(CyncCapability.DIMMING):
return ColorMode.BRIGHTNESS
return ColorMode.ONOFF
async def async_turn_on(self, **kwargs: Any) -> None:
"""Process an action on the light."""
if not kwargs:
await self._device.turn_on()
elif kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None:
color_temp = kwargs.get(ATTR_COLOR_TEMP_KELVIN)
converted_color_temp = self._normalize_color_temp(color_temp)
await self._device.set_color_temp(converted_color_temp)
elif kwargs.get(ATTR_RGB_COLOR) is not None:
rgb = kwargs.get(ATTR_RGB_COLOR)
await self._device.set_rgb(rgb)
elif kwargs.get(ATTR_BRIGHTNESS) is not None:
brightness = kwargs.get(ATTR_BRIGHTNESS)
converted_brightness = self._normalize_brightness(brightness)
await self._device.set_brightness(converted_brightness)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
await self._device.turn_off()
def _normalize_brightness(self, brightness: float | None) -> int | None:
"""Return calculated brightness value scaled between 0-100."""
if brightness is not None:
return int((brightness / 255) * 100)
return None
def _normalize_color_temp(self, color_temp_kelvin: float | None) -> int | None:
"""Return calculated color temp value scaled between 1-100."""
if color_temp_kelvin is not None:
kelvin_range = self.max_color_temp_kelvin - self.min_color_temp_kelvin
scaled_kelvin = int(
((color_temp_kelvin - self.min_color_temp_kelvin) / kelvin_range) * 100
)
if scaled_kelvin == 0:
scaled_kelvin += 1
return scaled_kelvin
return None
@property
def _device(self) -> CyncLight:
"""Fetch the reference to the backing Cync light for this device."""
return self.coordinator.data[self._cync_device_id]
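For the scaling helpers above, a standalone worked example (not part of the diff) of the Home Assistant-to-Cync conversions, using the 2000-7000 K limits declared on the entity:

# HA brightness (0-255) -> Cync brightness (0-100), as in _normalize_brightness
int((128 / 255) * 100)                        # -> 50
# Kelvin -> Cync 1-100 scale, as in _normalize_color_temp
int(((4500 - 2000) / (7000 - 2000)) * 100)    # -> 50
int(((2000 - 2000) / (7000 - 2000)) * 100)    # -> 0, bumped to 1 so the result stays in range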

View File

@@ -1,11 +0,0 @@
{
"domain": "cync",
"name": "Cync",
"codeowners": ["@Kinachi249"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cync",
"integration_type": "hub",
"iot_class": "cloud_push",
"quality_scale": "bronze",
"requirements": ["pycync==0.4.0"]
}

View File

@@ -1,69 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
test-coverage: todo
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: todo
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -1,32 +0,0 @@
{
"config": {
"step": {
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "Your Cync account's email address",
"password": "Your Cync account's password"
}
},
"two_factor": {
"data": {
"two_factor_code": "Two-factor code"
},
"data_description": {
"two_factor_code": "The two-factor code sent to your Cync account's email"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
}
}
}

View File

@@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
_hass: HomeAssistant
_config: ConfigType
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config.
This is here just to satisfy the abstract class interface. It is never called.
"""
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
"""Validate device condition config."""
return await async_validate_device_automation_config(
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
)
async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition."""

View File

@@ -1,24 +0,0 @@
"""The Ekey Bionyx integration."""
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
PLATFORMS: list[Platform] = [Platform.EVENT]
type EkeyBionyxConfigEntry = ConfigEntry
async def async_setup_entry(hass: HomeAssistant, entry: EkeyBionyxConfigEntry) -> bool:
"""Set up the Ekey Bionyx config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: EkeyBionyxConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,14 +0,0 @@
"""application_credentials platform the Ekey Bionyx integration."""
from homeassistant.components.application_credentials import AuthorizationServer
from homeassistant.core import HomeAssistant
from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
"""Return authorization server."""
return AuthorizationServer(
authorize_url=OAUTH2_AUTHORIZE,
token_url=OAUTH2_TOKEN,
)

View File

@@ -1,271 +0,0 @@
"""Config flow for ekey bionyx."""
import asyncio
import json
import logging
import re
import secrets
from typing import Any, NotRequired, TypedDict
import aiohttp
import ekey_bionyxpy
import voluptuous as vol
from homeassistant.components.webhook import (
async_generate_id as webhook_generate_id,
async_generate_path as webhook_generate_path,
)
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.const import CONF_TOKEN, CONF_URL
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.network import get_url
from homeassistant.helpers.selector import SelectOptionDict, SelectSelector
from .const import API_URL, DOMAIN, INTEGRATION_NAME, SCOPE
# Valid webhook name: starts with letter or underscore, contains letters, digits, spaces, dots, and underscores, does not end with space or dot
VALID_NAME_PATTERN = re.compile(r"^(?![\d\s])[\w\d \.]*[\w\d]$")
class ConfigFlowEkeyApi(ekey_bionyxpy.AbstractAuth):
"""ekey bionyx authentication before a ConfigEntry exists.
This implementation directly provides the token without supporting refresh.
"""
def __init__(
self,
websession: aiohttp.ClientSession,
token: dict[str, Any],
) -> None:
"""Initialize ConfigFlowEkeyApi."""
super().__init__(websession, API_URL)
self._token = token
async def async_get_access_token(self) -> str:
"""Return the token for the Ekey API."""
return self._token["access_token"]
class EkeyFlowData(TypedDict):
"""Type for Flow Data."""
api: NotRequired[ekey_bionyxpy.BionyxAPI]
system: NotRequired[ekey_bionyxpy.System]
systems: NotRequired[list[ekey_bionyxpy.System]]
class OAuth2FlowHandler(
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
):
"""Config flow to handle ekey bionyx OAuth2 authentication."""
DOMAIN = DOMAIN
check_deletion_task: asyncio.Task[None] | None = None
def __init__(self) -> None:
"""Initialize OAuth2FlowHandler."""
super().__init__()
self._data: EkeyFlowData = {}
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
@property
def extra_authorize_data(self) -> dict[str, Any]:
"""Extra data that needs to be appended to the authorize url."""
return {"scope": SCOPE}
async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
"""Start the user facing flow by initializing the API and getting the systems."""
client = ConfigFlowEkeyApi(async_get_clientsession(self.hass), data[CONF_TOKEN])
ap = ekey_bionyxpy.BionyxAPI(client)
self._data["api"] = ap
try:
system_res = await ap.get_systems()
except aiohttp.ClientResponseError:
return self.async_abort(
reason="cannot_connect",
description_placeholders={"ekeybionyx": INTEGRATION_NAME},
)
system = [s for s in system_res if s.own_system]
if len(system) == 0:
return self.async_abort(reason="no_own_systems")
self._data["systems"] = system
if len(system) == 1:
# skipping choose_system since there is only one
self._data["system"] = system[0]
return await self.async_step_check_system(user_input=None)
return await self.async_step_choose_system(user_input=None)
async def async_step_choose_system(
self, user_input: dict[str, Any] | None
) -> ConfigFlowResult:
"""Dialog to choose System if multiple systems are present."""
if user_input is None:
options: list[SelectOptionDict] = [
{"value": s.system_id, "label": s.system_name}
for s in self._data["systems"]
]
data_schema = {vol.Required("system"): SelectSelector({"options": options})}
return self.async_show_form(
step_id="choose_system",
data_schema=vol.Schema(data_schema),
description_placeholders={"ekeybionyx": INTEGRATION_NAME},
)
self._data["system"] = [
s for s in self._data["systems"] if s.system_id == user_input["system"]
][0]
return await self.async_step_check_system(user_input=None)
async def async_step_check_system(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Check if system has open webhooks."""
system = self._data["system"]
await self.async_set_unique_id(system.system_id)
self._abort_if_unique_id_configured()
if (
system.function_webhook_quotas["free"] == 0
and system.function_webhook_quotas["used"] == 0
):
return self.async_abort(
reason="no_available_webhooks",
description_placeholders={"ekeybionyx": INTEGRATION_NAME},
)
if system.function_webhook_quotas["used"] > 0:
return await self.async_step_delete_webhooks()
return await self.async_step_webhooks(user_input=None)
async def async_step_webhooks(
self, user_input: dict[str, Any] | None
) -> ConfigFlowResult:
"""Dialog to setup webhooks."""
system = self._data["system"]
errors: dict[str, str] | None = None
if user_input is not None:
errors = {}
for key, webhook_name in user_input.items():
if key == CONF_URL:
continue
if not re.match(VALID_NAME_PATTERN, webhook_name):
errors.update({key: "invalid_name"})
try:
cv.url(user_input[CONF_URL])
except vol.Invalid:
errors[CONF_URL] = "invalid_url"
if set(user_input) == {CONF_URL}:
errors["base"] = "no_webhooks_provided"
if not errors:
webhook_data = [
{
"auth": secrets.token_hex(32),
"name": webhook_name,
"webhook_id": webhook_generate_id(),
}
for key, webhook_name in user_input.items()
if key != CONF_URL
]
for webhook in webhook_data:
wh_def: ekey_bionyxpy.WebhookData = {
"integrationName": "Home Assistant",
"functionName": webhook["name"],
"locationName": "Home Assistant",
"definition": {
"url": user_input[CONF_URL]
+ webhook_generate_path(webhook["webhook_id"]),
"authentication": {"apiAuthenticationType": "None"},
"securityLevel": "AllowHttp",
"method": "Post",
"body": {
"contentType": "application/json",
"content": json.dumps({"auth": webhook["auth"]}),
},
},
}
webhook["ekey_id"] = (await system.add_webhook(wh_def)).webhook_id
return self.async_create_entry(
title=self._data["system"].system_name,
data={"webhooks": webhook_data},
)
data_schema: dict[Any, Any] = {
vol.Optional(f"webhook{i + 1}"): vol.All(str, vol.Length(max=50))
for i in range(self._data["system"].function_webhook_quotas["free"])
}
data_schema[vol.Required(CONF_URL)] = str
return self.async_show_form(
step_id="webhooks",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(data_schema),
{
CONF_URL: get_url(
self.hass,
allow_ip=True,
prefer_external=False,
)
}
| (user_input or {}),
),
errors=errors,
description_placeholders={
"webhooks_available": str(
self._data["system"].function_webhook_quotas["free"]
),
"ekeybionyx": INTEGRATION_NAME,
},
)
async def async_step_delete_webhooks(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Form to delete Webhooks."""
if user_input is None:
return self.async_show_form(step_id="delete_webhooks")
for webhook in await self._data["system"].get_webhooks():
await webhook.delete()
return await self.async_step_wait_for_deletion(user_input=None)
async def async_step_wait_for_deletion(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Wait for webhooks to be deleted in another flow."""
uncompleted_task: asyncio.Task[None] | None = None
if not self.check_deletion_task:
self.check_deletion_task = self.hass.async_create_task(
self.async_check_deletion_status()
)
if not self.check_deletion_task.done():
progress_action = "check_deletion_status"
uncompleted_task = self.check_deletion_task
if uncompleted_task:
return self.async_show_progress(
step_id="wait_for_deletion",
description_placeholders={"ekeybionyx": INTEGRATION_NAME},
progress_action=progress_action,
progress_task=uncompleted_task,
)
self.check_deletion_task = None
return self.async_show_progress_done(next_step_id="webhooks")
async def async_check_deletion_status(self) -> None:
"""Check if webhooks have been deleted."""
while True:
self._data["systems"] = await self._data["api"].get_systems()
self._data["system"] = [
s
for s in self._data["systems"]
if s.system_id == self._data["system"].system_id
][0]
if self._data["system"].function_webhook_quotas["used"] == 0:
break
await asyncio.sleep(5)
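As a standalone illustration of VALID_NAME_PATTERN from the top of this config flow (the example names themselves are made up), these are names that would pass or fail validation:

import re

VALID_NAME_PATTERN = re.compile(r"^(?![\d\s])[\w\d \.]*[\w\d]$")

assert VALID_NAME_PATTERN.match("Front Door")        # letters and a space: accepted
assert VALID_NAME_PATTERN.match("_backup.entry 2")   # underscore, dot, digit: accepted
assert not VALID_NAME_PATTERN.match("1st Door")      # starts with a digit: rejected
assert not VALID_NAME_PATTERN.match("Door ")         # ends with a space: rejected
assert not VALID_NAME_PATTERN.match("Door.")         # ends with a dot: rejected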

View File

@@ -1,13 +0,0 @@
"""Constants for the Ekey Bionyx integration."""
import logging
DOMAIN = "ekeybionyx"
INTEGRATION_NAME = "ekey bionyx"
LOGGER = logging.getLogger(__package__)
OAUTH2_AUTHORIZE = "https://ekeybionyxprod.b2clogin.com/ekeybionyxprod.onmicrosoft.com/B2C_1_sign_in_v2/oauth2/v2.0/authorize"
OAUTH2_TOKEN = "https://ekeybionyxprod.b2clogin.com/ekeybionyxprod.onmicrosoft.com/B2C_1_sign_in_v2/oauth2/v2.0/token"
API_URL = "https://api.bionyx.io/3rd-party/api"
SCOPE = "https://ekeybionyxprod.onmicrosoft.com/3rd-party-api/api-access"

View File

@@ -1,70 +0,0 @@
"""Event platform for ekey bionyx integration."""
from aiohttp.hdrs import METH_POST
from aiohttp.web import Request, Response
from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.components.webhook import (
async_register as webhook_register,
async_unregister as webhook_unregister,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import EkeyBionyxConfigEntry
from .const import DOMAIN
async def async_setup_entry(
hass: HomeAssistant,
entry: EkeyBionyxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Ekey event."""
async_add_entities(EkeyEvent(data) for data in entry.data["webhooks"])
class EkeyEvent(EventEntity):
"""Ekey Event."""
_attr_device_class = EventDeviceClass.BUTTON
_attr_event_types = ["event happened"]
def __init__(
self,
data: dict[str, str],
) -> None:
"""Initialise a Ekey event entity."""
self._attr_name = data["name"]
self._attr_unique_id = data["ekey_id"]
self._webhook_id = data["webhook_id"]
self._auth = data["auth"]
@callback
def _async_handle_event(self) -> None:
"""Handle the webhook event."""
self._trigger_event("event happened")
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Register callbacks with your device API/library."""
async def async_webhook_handler(
hass: HomeAssistant, webhook_id: str, request: Request
) -> Response | None:
if (await request.json())["auth"] == self._auth:
self._async_handle_event()
return None
webhook_register(
self.hass,
DOMAIN,
f"Ekey {self._attr_name}",
self._webhook_id,
async_webhook_handler,
allowed_methods=[METH_POST],
)
async def async_will_remove_from_hass(self) -> None:
"""Unregister Webhook."""
webhook_unregister(self.hass, self._webhook_id)
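The handler above fires the event only when the posted "auth" value matches the secret that the config flow embedded in the webhook definition. A hedged sketch (not part of the diff) of the request the fingerprint controller ends up sending, assuming the standard /api/webhook/&lt;id&gt; path produced by webhook_generate_path; URL, webhook ID and secret are placeholders:

import aiohttp

async def simulate_ekey_webhook(hass_url: str, webhook_id: str, auth_secret: str) -> None:
    # Body mirrors the JSON registered in the config flow: {"auth": <secrets.token_hex(32)>}
    async with aiohttp.ClientSession() as session:
        await session.post(f"{hass_url}/api/webhook/{webhook_id}", json={"auth": auth_secret})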

View File

@@ -1,11 +0,0 @@
{
"domain": "ekeybionyx",
"name": "ekey bionyx",
"codeowners": ["@richardpolzer"],
"config_flow": true,
"dependencies": ["application_credentials", "http"],
"documentation": "https://www.home-assistant.io/integrations/ekeybionyx",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["ekey-bionyxpy==1.0.0"]
}

View File

@@ -1,92 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: This integration does not provide actions.
appropriate-polling:
status: exempt
comment: This integration does not poll.
brands: done
common-modules: done
config-flow: done
config-flow-test-coverage: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not provide actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data:
status: exempt
comment: This integration does not connect to any device or service.
test-before-configure: done
test-before-setup:
status: exempt
comment: This integration does not connect to any device or service.
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: This integration does not provide actions.
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable:
status: exempt
comment: This integration has no way of knowing if the fingerprint reader is offline.
integration-owner: done
log-when-unavailable:
status: exempt
comment: This integration has no way of knowing if the fingerprint reader is offline.
parallel-updates:
status: exempt
comment: This integration does not poll.
reauthentication-flow:
status: exempt
comment: This integration does not store the tokens.
test-coverage: todo
# Gold
devices:
status: exempt
comment: This integration does not connect to any device or service.
diagnostics: todo
discovery-update-info:
status: exempt
comment: This integration does not support discovery.
discovery:
status: exempt
comment: This integration does not support discovery.
docs-data-update: todo
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: This integration does not connect to any device or service.
entity-category: todo
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: This integration has no entities that should be disabled by default.
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices:
status: exempt
comment: This integration does not connect to any device or service.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -1,66 +0,0 @@
{
"config": {
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
},
"choose_system": {
"data": {
"system": "System"
},
"data_description": {
"system": "System the event entities should be set up for."
},
"description": "Please select the {ekeybionyx} system which you want to connect to Home Assistant."
},
"webhooks": {
"description": "Please name your event entities. These event entities will be mapped as functions in the {ekeybionyx} app. You can configure up to {webhooks_available} event entities. Leaving a name empty will skip the setup of that event entity.",
"data": {
"webhook1": "Event entity 1",
"webhook2": "Event entity 2",
"webhook3": "Event entity 3",
"webhook4": "Event entity 4",
"webhook5": "Event entity 5",
"url": "Home Assistant URL"
},
"data_description": {
"webhook1": "Name of event entity 1 that will be mapped into a function",
"webhook2": "Name of event entity 2 that will be mapped into a function",
"webhook3": "Name of event entity 3 that will be mapped into a function",
"webhook4": "Name of event entity 4 that will be mapped into a function",
"webhook5": "Name of event entity 5 that will be mapped into a function",
"url": "Home Assistant instance URL which can be reached from the fingerprint controller"
}
},
"delete_webhooks": {
"description": "This system has already been connected to Home Assistant. If you continue, the previously configured functions will be deleted."
}
},
"progress": {
"check_deletion_status": "Please open the {ekeybionyx} app and confirm the deletion of the functions."
},
"error": {
"invalid_name": "Name is invalid",
"invalid_url": "URL is invalid",
"no_webhooks_provided": "No event names provided"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
"no_available_webhooks": "There are no available webhooks in the {ekeybionyx} system. Please delete some and try again.",
"no_own_systems": "Your account does not have admin access to any systems.",
"cannot_connect": "Connection to {ekeybionyx} failed. Please check your Internet connection and try again."
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
}
}
}

View File

@@ -57,7 +57,6 @@ from .manager import async_replace_device
ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
ERROR_INVALID_ENCRYPTION_KEY = "invalid_psk"
ERROR_INVALID_PASSWORD_AUTH = "invalid_auth"
_LOGGER = logging.getLogger(__name__)
ZERO_NOISE_PSK = "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA="
@@ -138,11 +137,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self._password = ""
return await self._async_authenticate_or_add()
if error == ERROR_INVALID_PASSWORD_AUTH or (
error is None and self._device_info and self._device_info.uses_password
):
return await self.async_step_authenticate()
if error is None and entry_data.get(CONF_NOISE_PSK):
# Device was configured with encryption but now connects without it.
# Check if it's the same device before offering to remove encryption.
@@ -696,15 +690,13 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
cli = APIClient(
host,
port or DEFAULT_PORT,
self._password or "",
"",
zeroconf_instance=zeroconf_instance,
noise_psk=noise_psk,
)
try:
await cli.connect()
self._device_info = await cli.device_info()
except InvalidAuthAPIError:
return ERROR_INVALID_PASSWORD_AUTH
except RequiresEncryptionAPIError:
return ERROR_REQUIRES_ENCRYPTION_KEY
except InvalidEncryptionKeyAPIError as ex:

View File

@@ -372,9 +372,6 @@ class ESPHomeManager:
"""Subscribe to states and list entities on successful API login."""
try:
await self._on_connect()
except InvalidAuthAPIError as err:
_LOGGER.warning("Authentication failed for %s: %s", self.host, err)
await self._start_reauth_and_disconnect()
except APIConnectionError as err:
_LOGGER.warning(
"Error getting setting up connection for %s: %s", self.host, err
@@ -644,14 +641,7 @@ class ESPHomeManager:
if self.reconnect_logic:
await self.reconnect_logic.stop()
return
await self._start_reauth_and_disconnect()
async def _start_reauth_and_disconnect(self) -> None:
"""Start reauth flow and stop reconnection attempts."""
self.entry.async_start_reauth(self.hass)
await self.cli.disconnect()
if self.reconnect_logic:
await self.reconnect_logic.stop()
async def _handle_dynamic_encryption_key(
self, device_info: EsphomeDeviceInfo
@@ -1073,7 +1063,7 @@ def _async_register_service(
service_name,
{
"description": (
f"Performs the action {service.name} of the node {device_info.name}"
f"Calls the service {service.name} of the node {device_info.name}"
),
"fields": fields,
},

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.11.0",
"aioesphomeapi==41.9.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.3.0"
],

View File

@@ -26,14 +26,11 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
super().__init__(coordinator)
self._serial = serial
self._camera_name = self.data["name"]
connections = set()
if mac_address := self.data["mac_address"]:
connections.add((CONNECTION_NETWORK_MAC, mac_address))
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
connections=connections,
connections={
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
},
manufacturer=MANUFACTURER,
model=self.data["device_sub_category"],
name=self.data["name"],
@@ -65,14 +62,11 @@ class EzvizBaseEntity(Entity):
self._serial = serial
self.coordinator = coordinator
self._camera_name = self.data["name"]
connections = set()
if mac_address := self.data["mac_address"]:
connections.add((CONNECTION_NETWORK_MAC, mac_address))
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
connections=connections,
connections={
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
},
manufacturer=MANUFACTURER,
model=self.data["device_sub_category"],
name=self.data["name"],

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250926.0"]
"requirements": ["home-assistant-frontend==20250903.5"]
}

View File

@@ -1,10 +1,8 @@
load_url:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
url:
example: "https://home-assistant.io"
required: true
@@ -12,12 +10,10 @@ load_url:
text:
set_config:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
key:
example: "motionSensitivity"
required: true
@@ -30,14 +26,12 @@ set_config:
text:
start_application:
target:
device:
integration: fully_kiosk
fields:
application:
example: "de.ozerov.fully"
required: true
selector:
text:
device_id:
required: true
selector:
device:
integration: fully_kiosk

View File

@@ -147,10 +147,6 @@
"name": "Load URL",
"description": "Loads a URL on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "Device ID",
"description": "The target device for this action."
},
"url": {
"name": "[%key:common::config_flow::data::url%]",
"description": "URL to load."
@@ -161,10 +157,6 @@
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
},
"key": {
"name": "Key",
"description": "Configuration parameter to set."
@@ -182,10 +174,6 @@
"application": {
"name": "Application",
"description": "Package name of the application to start."
},
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
}
}
}

View File

@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):
async def async_update(self) -> None:
"""Update an entity's state data."""
if (state := self._device.data.get("_state")) and (
last_comms := state.get("lastComms")
) is not None: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(last_comms)
if "_state" in self._device.data: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(
self._device.data["_state"]["lastComms"]
)
class GeniusZone(GeniusEntity):

View File

@@ -1,5 +1,7 @@
set_vacation:
target:
device:
integration: google_mail
entity:
integration: google_mail
fields:

View File

@@ -22,6 +22,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.20.0"],
"requirements": ["aiohomeconnect==0.19.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -32,12 +32,15 @@ set_location:
stop:
toggle:
target:
entity: {}
turn_on:
target:
entity: {}
turn_off:
target:
entity: {}
update_entity:
fields:
@@ -50,6 +53,8 @@ update_entity:
reload_custom_templates:
reload_config_entry:
target:
entity: {}
device: {}
fields:
entry_id:
advanced: true

View File

@@ -28,7 +28,7 @@ from homeassistant.config_entries import (
OptionsFlow,
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow, progress_step
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio
@@ -72,6 +72,8 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Base flow to install firmware."""
ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
_failed_addon_name: str
_failed_addon_reason: str
_picked_firmware_type: PickedFirmwareType
def __init__(self, *args: Any, **kwargs: Any) -> None:
@@ -83,6 +85,8 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
self._hardware_name: str = "unknown" # To be set in a subclass
self._zigbee_integration = ZigbeeIntegration.ZHA
self.addon_install_task: asyncio.Task | None = None
self.addon_start_task: asyncio.Task | None = None
self.addon_uninstall_task: asyncio.Task | None = None
self.firmware_install_task: asyncio.Task[None] | None = None
self.installing_firmware_name: str | None = None
@@ -123,12 +127,8 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
) -> ConfigFlowResult:
"""Pick Thread or Zigbee firmware."""
# Determine if ZHA or Thread are already configured to present migrate options
zha_entries = self.hass.config_entries.async_entries(
ZHA_DOMAIN, include_ignore=False
)
otbr_entries = self.hass.config_entries.async_entries(
OTBR_DOMAIN, include_ignore=False
)
zha_entries = self.hass.config_entries.async_entries(ZHA_DOMAIN)
otbr_entries = self.hass.config_entries.async_entries(OTBR_DOMAIN)
return self.async_show_menu(
step_id="pick_firmware",
@@ -486,6 +486,18 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Install Zigbee firmware."""
raise NotImplementedError
async def async_step_addon_operation_failed(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Abort when add-on installation or start failed."""
return self.async_abort(
reason=self._failed_addon_reason,
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": self._failed_addon_name,
},
)
async def async_step_pre_confirm_zigbee(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -549,12 +561,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Install Thread firmware."""
raise NotImplementedError
@progress_step(
description_placeholders=lambda self: {
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
}
)
async def async_step_install_otbr_addon(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -564,43 +570,70 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
_LOGGER.debug("OTBR addon info: %s", addon_info)
try:
await addon_manager.async_install_addon_waiting()
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow(
"addon_install_failed",
if not self.addon_install_task:
self.addon_install_task = self.hass.async_create_task(
addon_manager.async_install_addon_waiting(),
"OTBR addon install",
)
if not self.addon_install_task.done():
return self.async_show_progress(
step_id="install_otbr_addon",
progress_action="install_addon",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": addon_manager.addon_name,
},
) from err
progress_task=self.addon_install_task,
)
return await self.async_step_finish_thread_installation()
try:
await self.addon_install_task
except AddonError as err:
_LOGGER.error(err)
self._failed_addon_name = addon_manager.addon_name
self._failed_addon_reason = "addon_install_failed"
return self.async_show_progress_done(next_step_id="addon_operation_failed")
finally:
self.addon_install_task = None
return self.async_show_progress_done(next_step_id="finish_thread_installation")
@progress_step(
description_placeholders=lambda self: {
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
}
)
async def async_step_start_otbr_addon(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Configure OTBR to point to the SkyConnect and run the addon."""
try:
await self._configure_and_start_otbr_addon()
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow(
"addon_start_failed",
otbr_manager = get_otbr_addon_manager(self.hass)
if not self.addon_start_task:
self.addon_start_task = self.hass.async_create_task(
self._configure_and_start_otbr_addon()
)
if not self.addon_start_task.done():
return self.async_show_progress(
step_id="start_otbr_addon",
progress_action="start_otbr_addon",
description_placeholders={
**self._get_translation_placeholders(),
"addon_name": get_otbr_addon_manager(self.hass).addon_name,
"addon_name": otbr_manager.addon_name,
},
) from err
progress_task=self.addon_start_task,
)
return await self.async_step_pre_confirm_otbr()
try:
await self.addon_start_task
except (AddonError, AbortFlow) as err:
_LOGGER.error(err)
self._failed_addon_name = otbr_manager.addon_name
self._failed_addon_reason = (
err.reason if isinstance(err, AbortFlow) else "addon_start_failed"
)
return self.async_show_progress_done(next_step_id="addon_operation_failed")
finally:
self.addon_start_task = None
return self.async_show_progress_done(next_step_id="pre_confirm_otbr")
async def async_step_pre_confirm_otbr(
self, user_input: dict[str, Any] | None = None
@@ -608,6 +641,20 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Pre-confirm OTBR setup."""
# This step is necessary to prevent `user_input` from being passed through
return await self.async_step_confirm_otbr()
async def async_step_confirm_otbr(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm OTBR setup."""
assert self._device is not None
if user_input is None:
return self.async_show_form(
step_id="confirm_otbr",
description_placeholders=self._get_translation_placeholders(),
)
# OTBR discovery is done automatically via hassio
return self._async_flow_finished()

View File

@@ -63,7 +63,7 @@ async def async_get_controller(hass: HomeAssistant) -> Controller:
controller = Controller(
async_zeroconf_instance=async_zeroconf_instance,
bleak_scanner_instance=bleak_scanner_instance,
bleak_scanner_instance=bleak_scanner_instance, # type: ignore[arg-type]
char_cache=char_cache,
)

View File

@@ -3,9 +3,7 @@
from __future__ import annotations
import asyncio
from email.message import Message
import logging
from typing import Any
from aioimaplib import IMAP4_SSL, AioImapException, Response
import voluptuous as vol
@@ -35,7 +33,6 @@ from .coordinator import (
ImapPollingDataUpdateCoordinator,
ImapPushDataUpdateCoordinator,
connect_to_server,
get_parts,
)
from .errors import InvalidAuth, InvalidFolder
@@ -43,7 +40,6 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]
CONF_ENTRY = "entry"
CONF_SEEN = "seen"
CONF_PART = "part"
CONF_UID = "uid"
CONF_TARGET_FOLDER = "target_folder"
@@ -68,11 +64,6 @@ SERVICE_MOVE_SCHEMA = _SERVICE_UID_SCHEMA.extend(
)
SERVICE_DELETE_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_TEXT_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_PART_SCHEMA = _SERVICE_UID_SCHEMA.extend(
{
vol.Required(CONF_PART): cv.string,
}
)
type ImapConfigEntry = ConfigEntry[ImapDataUpdateCoordinator]
@@ -225,14 +216,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
translation_placeholders={"error": str(exc)},
) from exc
raise_on_error(response, "fetch_failed")
# Index 1 of the response lines contains the bytearray with the message data
message = ImapMessage(response.lines[1])
await client.close()
return {
"text": message.text,
"sender": message.sender,
"subject": message.subject,
"parts": get_parts(message.email_message),
"uid": uid,
}
@@ -244,73 +233,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
supports_response=SupportsResponse.ONLY,
)
async def async_fetch_part(call: ServiceCall) -> ServiceResponse:
"""Process fetch email part service and return content."""
@callback
def get_message_part(message: Message, part_key: str) -> Message:
part: Message | Any = message
for index in part_key.split(","):
sub_parts = part.get_payload()
try:
assert isinstance(sub_parts, list)
part = sub_parts[int(index)]
except (AssertionError, ValueError, IndexError) as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_part_index",
) from exc
return part
entry_id: str = call.data[CONF_ENTRY]
uid: str = call.data[CONF_UID]
part_key: str = call.data[CONF_PART]
_LOGGER.debug(
"Fetch part %s for message %s. Entry: %s",
part_key,
uid,
entry_id,
)
client = await async_get_imap_client(hass, entry_id)
try:
response = await client.fetch(uid, "BODY.PEEK[]")
except (TimeoutError, AioImapException) as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="imap_server_fail",
translation_placeholders={"error": str(exc)},
) from exc
raise_on_error(response, "fetch_failed")
# Index 1 of the response lines contains the bytearray with the message data
message = ImapMessage(response.lines[1])
await client.close()
part_data = get_message_part(message.email_message, part_key)
part_data_content = part_data.get_payload(decode=False)
try:
assert isinstance(part_data_content, str)
except AssertionError as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_part_index",
) from exc
return {
"part_data": part_data_content,
"content_type": part_data.get_content_type(),
"content_transfer_encoding": part_data.get("Content-Transfer-Encoding"),
"filename": part_data.get_filename(),
"part": part_key,
"uid": uid,
}
hass.services.async_register(
DOMAIN,
"fetch_part",
async_fetch_part,
SERVICE_FETCH_PART_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
return True
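For context on the comma-separated part keys the removed fetch_part service consumed (see the "0,1" example in services.yaml further down): each index selects one level of a multipart payload, exactly as get_message_part above and get_parts in coordinator.py walk it. A standalone sketch, not part of the diff:

from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

msg = MIMEMultipart()
msg.attach(MIMEText("plain body", "plain"))                   # part key "0"
inner = MIMEMultipart()
inner.attach(MIMEApplication(b"%PDF-", Name="invoice.pdf"))   # part key "1,0"
msg.attach(inner)                                             # part key "1" (multipart, recursed into)

part = msg
for index in "1,0".split(","):                                # same walk as get_message_part
    part = part.get_payload()[int(index)]
print(part.get_content_type())                                # application/octet-stream
print(part.get_filename())                                    # invoice.pdf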

View File

@@ -21,7 +21,7 @@ from homeassistant.const import (
CONF_VERIFY_SSL,
CONTENT_TYPE_TEXT_PLAIN,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
@@ -209,28 +209,6 @@ class ImapMessage:
return str(self.email_message.get_payload())
@callback
def get_parts(message: Message, prefix: str | None = None) -> dict[str, Any]:
"""Return information about the parts of a multipart message."""
parts: dict[str, Any] = {}
if not message.is_multipart():
return {}
for index, part in enumerate(message.get_payload(), 0):
if TYPE_CHECKING:
assert isinstance(part, Message)
key = f"{prefix},{index}" if prefix else f"{index}"
if part.is_multipart():
parts |= get_parts(part, key)
continue
parts[key] = {"content_type": part.get_content_type()}
if filename := part.get_filename():
parts[key]["filename"] = filename
if content_transfer_encoding := part.get("Content-Transfer-Encoding"):
parts[key]["content_transfer_encoding"] = content_transfer_encoding
return parts
class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
"""Base class for imap client."""
@@ -297,7 +275,6 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
"sender": message.sender,
"subject": message.subject,
"uid": last_message_uid,
"parts": get_parts(message.email_message),
}
data.update({key: getattr(message, key) for key in self._event_data_keys})
if self.custom_event_template is not None:

View File

@@ -21,9 +21,6 @@
},
"fetch": {
"service": "mdi:email-sync-outline"
},
"fetch_part": {
"service": "mdi:email-sync-outline"
}
}
}

View File

@@ -56,22 +56,3 @@ fetch:
example: "12"
selector:
text:
fetch_part:
fields:
entry:
required: true
selector:
config_entry:
integration: "imap"
uid:
required: true
example: "12"
selector:
text:
part:
required: true
example: "0,1"
selector:
text:

View File

@@ -84,9 +84,6 @@
"imap_server_fail": {
"message": "The IMAP server failed to connect: {error}."
},
"invalid_part_index": {
"message": "Invalid part index."
},
"seen_failed": {
"message": "Marking message as seen failed with \"{error}\"."
}
@@ -151,24 +148,6 @@
}
}
},
"fetch_part": {
"name": "Fetch message part",
"description": "Fetches a message part or attachment from an email message.",
"fields": {
"entry": {
"name": "[%key:component::imap::services::fetch::fields::entry::name%]",
"description": "[%key:component::imap::services::fetch::fields::entry::description%]"
},
"uid": {
"name": "[%key:component::imap::services::fetch::fields::uid::name%]",
"description": "[%key:component::imap::services::fetch::fields::uid::description%]"
},
"part": {
"name": "Part",
"description": "The message part index."
}
}
},
"seen": {
"name": "Mark message as seen",
"description": "Marks an email as seen.",

View File

@@ -0,0 +1,28 @@
"""Analytics platform."""
from homeassistant.components.analytics import (
AnalyticsInput,
AnalyticsModifications,
EntityAnalyticsModifications,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
ent_reg = er.async_get(hass)
entities: dict[str, EntityAnalyticsModifications] = {}
for entity_id in analytics_input.entity_ids:
entity_entry = ent_reg.entities[entity_id]
if entity_entry.capabilities is not None:
capabilities = dict(entity_entry.capabilities)
capabilities["options"] = len(capabilities["options"])
entities[entity_id] = EntityAnalyticsModifications(
capabilities=capabilities
)
return AnalyticsModifications(entities=entities)
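In effect, this analytics platform replaces an entity's options list with its length before capabilities are reported, so the concrete option values never leave the instance. Roughly, with invented values:

# Capabilities as stored in the entity registry (example values)
{"options": ["home", "away", "sleep"]}
# Capabilities as submitted after async_modify_analytics
{"options": 3}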

View File

@@ -142,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
)
coordinators = LaMarzoccoRuntimeData(
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),

View File

@@ -8,7 +8,7 @@ from datetime import timedelta
import logging
from typing import Any
from pylamarzocco import LaMarzoccoCloudClient, LaMarzoccoMachine
from pylamarzocco import LaMarzoccoMachine
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
from homeassistant.config_entries import ConfigEntry
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import DOMAIN
SCAN_INTERVAL = timedelta(seconds=60)
SCAN_INTERVAL = timedelta(seconds=15)
SETTINGS_UPDATE_INTERVAL = timedelta(hours=8)
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=30)
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=15)
@@ -51,7 +51,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
hass: HomeAssistant,
entry: LaMarzoccoConfigEntry,
device: LaMarzoccoMachine,
cloud_client: LaMarzoccoCloudClient | None = None,
) -> None:
"""Initialize coordinator."""
super().__init__(
@@ -62,7 +61,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
update_interval=self._default_update_interval,
)
self.device = device
self.cloud_client = cloud_client
async def _async_update_data(self) -> None:
"""Do the data update."""
@@ -87,17 +85,11 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
"""Class to handle fetching data from the La Marzocco API centrally."""
cloud_client: LaMarzoccoCloudClient
async def _internal_async_update_data(self) -> None:
"""Fetch data from API endpoint."""
# ensure token stays valid; does nothing if token is still valid
await self.cloud_client.async_get_access_token()
if self.device.websocket.connected:
return
await self.device.get_dashboard()
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())

View File

@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.1.1"]
"requirements": ["pylamarzocco==2.1.0"]
}

View File

@@ -25,7 +25,6 @@ from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,

View File

@@ -20,14 +20,6 @@
}
}
},
"number": {
"light_brightness": {
"default": "mdi:brightness-5"
},
"plant_days": {
"default": "mdi:calendar-blank"
}
},
"select": {
"display_temperature_unit": {
"default": "mdi:thermometer-lines"

View File

@@ -1,137 +0,0 @@
"""Support for LetPot number entities."""
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any
from letpot.deviceclient import LetPotDeviceClient
from letpot.models import DeviceFeature
from homeassistant.components.number import (
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import PRECISION_WHOLE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator
from .entity import LetPotEntity, LetPotEntityDescription, exception_handler
# Each change pushes a 'full' device status with the change. The library will cache
# pending changes to avoid overwriting, but try to avoid a lot of parallelism.
PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class LetPotNumberEntityDescription(LetPotEntityDescription, NumberEntityDescription):
"""Describes a LetPot number entity."""
max_value_fn: Callable[[LetPotDeviceCoordinator], float]
value_fn: Callable[[LetPotDeviceCoordinator], float | None]
set_value_fn: Callable[[LetPotDeviceClient, str, float], Coroutine[Any, Any, None]]
NUMBERS: tuple[LetPotNumberEntityDescription, ...] = (
LetPotNumberEntityDescription(
key="light_brightness_levels",
translation_key="light_brightness",
value_fn=(
lambda coordinator: coordinator.device_client.get_light_brightness_levels(
coordinator.device.serial_number
).index(coordinator.data.light_brightness)
+ 1
if coordinator.data.light_brightness is not None
else None
),
set_value_fn=(
lambda device_client, serial, value: device_client.set_light_brightness(
serial,
device_client.get_light_brightness_levels(serial)[int(value) - 1],
)
),
supported_fn=(
lambda coordinator: DeviceFeature.LIGHT_BRIGHTNESS_LEVELS
in coordinator.device_client.device_info(
coordinator.device.serial_number
).features
),
native_min_value=float(1),
max_value_fn=lambda coordinator: float(
len(
coordinator.device_client.get_light_brightness_levels(
coordinator.device.serial_number
)
)
),
native_step=PRECISION_WHOLE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
LetPotNumberEntityDescription(
key="plant_days",
translation_key="plant_days",
native_unit_of_measurement=UnitOfTime.DAYS,
value_fn=lambda coordinator: coordinator.data.plant_days,
set_value_fn=(
lambda device_client, serial, value: device_client.set_plant_days(
serial, int(value)
)
),
native_min_value=float(0),
max_value_fn=lambda _: float(999),
native_step=PRECISION_WHOLE,
mode=NumberMode.BOX,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: LetPotConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up LetPot number entities based on a config entry and device status/features."""
coordinators = entry.runtime_data
async_add_entities(
LetPotNumberEntity(coordinator, description)
for description in NUMBERS
for coordinator in coordinators
if description.supported_fn(coordinator)
)
class LetPotNumberEntity(LetPotEntity, NumberEntity):
"""Defines a LetPot number entity."""
entity_description: LetPotNumberEntityDescription
def __init__(
self,
coordinator: LetPotDeviceCoordinator,
description: LetPotNumberEntityDescription,
) -> None:
"""Initialize LetPot number entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{coordinator.device.serial_number}_{description.key}"
@property
def native_max_value(self) -> float:
"""Return the maximum available value."""
return self.entity_description.max_value_fn(self.coordinator)
@property
def native_value(self) -> float | None:
"""Return the number value."""
return self.entity_description.value_fn(self.coordinator)
@exception_handler
async def async_set_native_value(self, value: float) -> None:
"""Change the number value."""
return await self.entity_description.set_value_fn(
self.coordinator.device_client,
self.coordinator.device.serial_number,
value,
)
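The removed light_brightness number maps the device's discrete brightness levels onto a 1-based slider position. A small sketch of that mapping, assuming an example device with four levels (the level values are invented):

levels = [125, 250, 500, 1000]   # device_client.get_light_brightness_levels(serial)
current = 500                    # coordinator.data.light_brightness
slider_position = levels.index(current) + 1   # -> 3; native_min_value is 1
# Setting the slider to 4 would write levels[4 - 1] == 1000 back via set_light_brightness.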

View File

@@ -49,14 +49,6 @@
"name": "Refill error"
}
},
"number": {
"light_brightness": {
"name": "Light brightness"
},
"plant_days": {
"name": "Plants age"
}
},
"select": {
"display_temperature_unit": {
"name": "Temperature unit on display",
@@ -66,7 +58,7 @@
}
},
"light_brightness": {
"name": "[%key:component::letpot::entity::number::light_brightness::name%]",
"name": "Light brightness",
"state": {
"low": "[%key:common::state::low%]",
"high": "[%key:common::state::high%]"

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/libre_hardware_monitor",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.4.0"]
"requirements": ["librehardwaremonitor-api==1.3.1"]
}

View File

@@ -28,7 +28,7 @@ rules:
docs-configuration-parameters:
status: done
comment: No options to configure
docs-installation-parameters: done
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo

View File

@@ -1,5 +1,7 @@
set_hold_time:
target:
device:
integration: lyric
entity:
integration: lyric
domain: climate

View File

@@ -7,9 +7,8 @@ from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionE
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_PORT, CONF_VERIFY_SSL
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
from .const import DOMAIN, LOGGER, MIN_REQUIRED_MEALIE_VERSION
from .utils import create_version
@@ -26,21 +25,13 @@ REAUTH_SCHEMA = vol.Schema(
vol.Required(CONF_API_TOKEN): str,
}
)
DISCOVERY_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_TOKEN): str,
}
)
class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
"""Mealie config flow."""
VERSION = 1
host: str | None = None
verify_ssl: bool = True
_hassio_discovery: dict[str, Any] | None = None
async def check_connection(
self, api_token: str
@@ -152,59 +143,3 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=USER_SCHEMA,
errors=errors,
)
async def async_step_hassio(
self, discovery_info: HassioServiceInfo
) -> ConfigFlowResult:
"""Prepare configuration for a Mealie add-on.
This flow is triggered by the discovery component.
"""
await self._async_handle_discovery_without_unique_id()
self._hassio_discovery = discovery_info.config
return await self.async_step_hassio_confirm()
async def async_step_hassio_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm Supervisor discovery and prompt for API token."""
if user_input is None:
return await self._show_hassio_form()
assert self._hassio_discovery
self.host = (
f"{self._hassio_discovery[CONF_HOST]}:{self._hassio_discovery[CONF_PORT]}"
)
self.verify_ssl = True
errors, user_id = await self.check_connection(
user_input[CONF_API_TOKEN],
)
if not errors:
await self.async_set_unique_id(user_id)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title="Mealie",
data={
CONF_HOST: self.host,
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
CONF_VERIFY_SSL: self.verify_ssl,
},
)
return await self._show_hassio_form(errors)
async def _show_hassio_form(
self, errors: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Show the Hass.io confirmation form to the user."""
assert self._hassio_discovery
return self.async_show_form(
step_id="hassio_confirm",
data_schema=DISCOVERY_SCHEMA,
description_placeholders={"addon": self._hassio_discovery["addon"]},
errors=errors or {},
)
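The removed Supervisor discovery step only reads a handful of keys from HassioServiceInfo.config. A payload along these lines (values invented) is what async_step_hassio_confirm expects before it builds "<host>:<port>" and prompts for the API token:

discovery_config = {
    "addon": "Mealie",      # shown in the confirmation dialog placeholder
    "host": "core-mealie",  # CONF_HOST
    "port": 9000,           # CONF_PORT
}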

View File

@@ -39,14 +39,8 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration will only discover a Mealie addon that is local, not on the network.
discovery:
status: done
comment: |
The integration will discover a Mealie addon posting a discovery message.
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: todo

View File

@@ -39,16 +39,6 @@
"api_token": "[%key:component::mealie::common::data_description_api_token%]",
"verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]"
}
},
"hassio_confirm": {
"title": "Mealie via Home Assistant add-on",
"description": "Do you want to configure Home Assistant to connect to the Mealie instance provided by the add-on: {addon}?",
"data": {
"api_token": "[%key:common::config_flow::data::api_token%]"
},
"data_description": {
"api_token": "[%key:component::mealie::common::data_description_api_token%]"
}
}
},
"error": {
@@ -60,7 +50,6 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "You have to use the same account that was used to configure the integration."

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from typing import Protocol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.integration_platform import (
@@ -73,7 +72,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
# Local sources support
await _process_media_source_platform(hass, DOMAIN, local_source)
hass.http.register_view(local_source.UploadMediaView)
websocket_api.async_register_command(hass, local_source.websocket_remove_media)
await async_process_integration_platforms(
hass, DOMAIN, _process_media_source_platform
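For context, websocket_api.async_register_command expects a handler decorated with a message schema. A generic sketch of that pattern follows; it is not the actual local_source.websocket_remove_media implementation (whose schema is not part of this diff), and the command type string is hypothetical:

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant


@websocket_api.websocket_command(
    {
        vol.Required("type"): "example/remove_media",  # hypothetical command type
        vol.Required("media_content_id"): str,
    }
)
@websocket_api.async_response
async def websocket_remove_media(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict,
) -> None:
    """Delete the referenced media, then confirm to the caller."""
    # ... perform the delete here ...
    connection.send_result(msg["id"])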

Some files were not shown because too many files have changed in this diff.