mirror of
https://github.com/home-assistant/core.git
synced 2025-09-27 13:59:29 +00:00
Compare commits
47 Commits
improve-ha
...
rc
Author | SHA1 | Date | |
---|---|---|---|
![]() |
77f897a768 | ||
![]() |
4f0a6ef9a1 | ||
![]() |
66c6b0f5fc | ||
![]() |
dd01243391 | ||
![]() |
66c17e250a | ||
![]() |
723902e233 | ||
![]() |
59fdb9f3b5 | ||
![]() |
d83502514a | ||
![]() |
08e81b2ba6 | ||
![]() |
1e808c965d | ||
![]() |
563b58c9aa | ||
![]() |
cf223880e8 | ||
![]() |
4058ca59ed | ||
![]() |
1386c01733 | ||
![]() |
46504947f7 | ||
![]() |
0a44682014 | ||
![]() |
06a57473a9 | ||
![]() |
fbed66ef1f | ||
![]() |
99a0380ec5 | ||
![]() |
68c51dc7aa | ||
![]() |
3d945b0fc5 | ||
![]() |
7b26a93d38 | ||
![]() |
1b2eab00be | ||
![]() |
750e849f09 | ||
![]() |
6aaddad56b | ||
![]() |
a5af974209 | ||
![]() |
09e45f6f54 | ||
![]() |
d857d8850c | ||
![]() |
ccc50f2412 | ||
![]() |
3905723900 | ||
![]() |
cee88473a2 | ||
![]() |
cdf613d3f8 | ||
![]() |
156a0f1a3d | ||
![]() |
cc2a5b43dd | ||
![]() |
731064f7e9 | ||
![]() |
2f75661c20 | ||
![]() |
be6f056f30 | ||
![]() |
79599e1284 | ||
![]() |
a255585ab6 | ||
![]() |
e9bde225fe | ||
![]() |
d9521ac2a0 | ||
![]() |
d8b24ccccd | ||
![]() |
b4417a76d5 | ||
![]() |
274f6eb54a | ||
![]() |
21a5aaf35c | ||
![]() |
05820a49d0 | ||
![]() |
17b12d29af |
62
.github/workflows/ci.yaml
vendored
62
.github/workflows/ci.yaml
vendored
@@ -40,7 +40,7 @@ env:
|
||||
CACHE_VERSION: 8
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2025.11"
|
||||
HA_SHORT_VERSION: "2025.10"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
ALL_PYTHON_VERSIONS: "['3.13']"
|
||||
# 10.3 is the oldest supported version
|
||||
@@ -263,7 +263,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
@@ -279,7 +279,7 @@ jobs:
|
||||
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
lookup-only: true
|
||||
@@ -309,7 +309,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -318,7 +318,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -349,7 +349,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -358,7 +358,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -389,7 +389,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -398,7 +398,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -505,7 +505,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
@@ -513,7 +513,7 @@ jobs:
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore uv wheel cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: ${{ env.UV_CACHE_DIR }}
|
||||
key: >-
|
||||
@@ -525,7 +525,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-
|
||||
- name: Check if apt cache exists
|
||||
id: cache-apt-check
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
|
||||
path: |
|
||||
@@ -570,7 +570,7 @@ jobs:
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -622,7 +622,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -651,7 +651,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -684,7 +684,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -741,7 +741,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -784,7 +784,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -831,7 +831,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -883,7 +883,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -891,7 +891,7 @@ jobs:
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore mypy cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: .mypy_cache
|
||||
key: >-
|
||||
@@ -935,7 +935,7 @@ jobs:
|
||||
name: Split tests for full run
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -967,7 +967,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1009,7 +1009,7 @@ jobs:
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1042,7 +1042,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1156,7 +1156,7 @@ jobs:
|
||||
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1189,7 +1189,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1310,7 +1310,7 @@ jobs:
|
||||
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1345,7 +1345,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1485,7 +1485,7 @@ jobs:
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1518,7 +1518,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
4
.github/workflows/wheels.yml
vendored
4
.github/workflows/wheels.yml
vendored
@@ -160,7 +160,7 @@ jobs:
|
||||
|
||||
# home-assistant/wheels doesn't support sha pinning
|
||||
- name: Build wheels
|
||||
uses: home-assistant/wheels@2025.09.0
|
||||
uses: home-assistant/wheels@2025.09.1
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
@@ -221,7 +221,7 @@ jobs:
|
||||
|
||||
# home-assistant/wheels doesn't support sha pinning
|
||||
- name: Build wheels
|
||||
uses: home-assistant/wheels@2025.09.0
|
||||
uses: home-assistant/wheels@2025.09.1
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
|
10
build.yaml
10
build.yaml
@@ -1,10 +1,10 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
|
||||
codenotary:
|
||||
signer: notary@home-assistant.io
|
||||
base_image: notary@home-assistant.io
|
||||
|
@@ -4,10 +4,18 @@ from __future__ import annotations
|
||||
|
||||
from airos.airos8 import AirOS8
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
|
||||
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
@@ -21,13 +29,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
|
||||
|
||||
# By default airOS 8 comes with self-signed SSL certificates,
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(hass, verify_ssl=False)
|
||||
session = async_get_clientsession(
|
||||
hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
|
||||
)
|
||||
|
||||
airos_device = AirOS8(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
|
||||
)
|
||||
|
||||
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
|
||||
@@ -40,6 +51,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
|
||||
"""Migrate old config entry."""
|
||||
|
||||
if entry.version > 1:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if entry.version == 1 and entry.minor_version == 1:
|
||||
new_data = {**entry.data}
|
||||
advanced_data = {
|
||||
CONF_SSL: DEFAULT_SSL,
|
||||
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
|
||||
}
|
||||
new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data=new_data,
|
||||
minor_version=2,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
|
@@ -15,10 +15,17 @@ from airos.exceptions import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from .coordinator import AirOS8
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -28,6 +35,15 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_USERNAME, default="ubnt"): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
|
||||
vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
|
||||
}
|
||||
),
|
||||
{"collapsed": True},
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -36,6 +52,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Ubiquiti airOS."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self,
|
||||
@@ -46,13 +63,17 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
# By default airOS 8 comes with self-signed SSL certificates,
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(self.hass, verify_ssl=False)
|
||||
session = async_get_clientsession(
|
||||
self.hass,
|
||||
verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
|
||||
)
|
||||
|
||||
airos_device = AirOS8(
|
||||
host=user_input[CONF_HOST],
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=session,
|
||||
use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
|
||||
)
|
||||
try:
|
||||
await airos_device.login()
|
||||
|
@@ -7,3 +7,8 @@ DOMAIN = "airos"
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
MANUFACTURER = "Ubiquiti"
|
||||
|
||||
DEFAULT_VERIFY_SSL = False
|
||||
DEFAULT_SSL = True
|
||||
|
||||
SECTION_ADVANCED_SETTINGS = "advanced_settings"
|
||||
|
@@ -2,11 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.const import CONF_HOST, CONF_SSL
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
|
||||
from .coordinator import AirOSDataUpdateCoordinator
|
||||
|
||||
|
||||
@@ -20,9 +20,14 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
|
||||
super().__init__(coordinator)
|
||||
|
||||
airos_data = self.coordinator.data
|
||||
url_schema = (
|
||||
"https"
|
||||
if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
|
||||
else "http"
|
||||
)
|
||||
|
||||
configuration_url: str | None = (
|
||||
f"https://{coordinator.config_entry.data[CONF_HOST]}"
|
||||
f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
|
||||
)
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
|
@@ -12,6 +12,18 @@
|
||||
"host": "IP address or hostname of the airOS device",
|
||||
"username": "Administrator username for the airOS device, normally 'ubnt'",
|
||||
"password": "Password configured through the UISP app or web interface"
|
||||
},
|
||||
"sections": {
|
||||
"advanced_settings": {
|
||||
"data": {
|
||||
"ssl": "Use HTTPS",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"ssl": "Whether the connection should be encrypted (required for most devices)",
|
||||
"verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -1308,7 +1308,9 @@ class PipelineRun:
|
||||
# instead of a full response.
|
||||
all_targets_in_satellite_area = (
|
||||
self._get_all_targets_in_satellite_area(
|
||||
conversation_result.response, self._device_id
|
||||
conversation_result.response,
|
||||
self._satellite_id,
|
||||
self._device_id,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1337,39 +1339,62 @@ class PipelineRun:
|
||||
return (speech, all_targets_in_satellite_area)
|
||||
|
||||
def _get_all_targets_in_satellite_area(
|
||||
self, intent_response: intent.IntentResponse, device_id: str | None
|
||||
self,
|
||||
intent_response: intent.IntentResponse,
|
||||
satellite_id: str | None,
|
||||
device_id: str | None,
|
||||
) -> bool:
|
||||
"""Return true if all targeted entities were in the same area as the device."""
|
||||
if (
|
||||
(intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
|
||||
or (not intent_response.matched_states)
|
||||
or (not device_id)
|
||||
):
|
||||
return False
|
||||
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
if (not (device := device_registry.async_get(device_id))) or (
|
||||
not device.area_id
|
||||
intent_response.response_type != intent.IntentResponseType.ACTION_DONE
|
||||
or not intent_response.matched_states
|
||||
):
|
||||
return False
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
for state in intent_response.matched_states:
|
||||
entity = entity_registry.async_get(state.entity_id)
|
||||
if not entity:
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
area_id: str | None = None
|
||||
|
||||
if (
|
||||
satellite_id is not None
|
||||
and (target_entity_entry := entity_registry.async_get(satellite_id))
|
||||
is not None
|
||||
):
|
||||
area_id = target_entity_entry.area_id
|
||||
device_id = target_entity_entry.device_id
|
||||
|
||||
if area_id is None:
|
||||
if device_id is None:
|
||||
return False
|
||||
|
||||
if (entity_area_id := entity.area_id) is None:
|
||||
if (entity.device_id is None) or (
|
||||
(entity_device := device_registry.async_get(entity.device_id))
|
||||
is None
|
||||
):
|
||||
device_entry = device_registry.async_get(device_id)
|
||||
if device_entry is None:
|
||||
return False
|
||||
|
||||
area_id = device_entry.area_id
|
||||
if area_id is None:
|
||||
return False
|
||||
|
||||
for state in intent_response.matched_states:
|
||||
target_entity_entry = entity_registry.async_get(state.entity_id)
|
||||
if target_entity_entry is None:
|
||||
return False
|
||||
|
||||
target_area_id = target_entity_entry.area_id
|
||||
if target_area_id is None:
|
||||
if target_entity_entry.device_id is None:
|
||||
return False
|
||||
|
||||
entity_area_id = entity_device.area_id
|
||||
target_device_entry = device_registry.async_get(
|
||||
target_entity_entry.device_id
|
||||
)
|
||||
if target_device_entry is None:
|
||||
return False
|
||||
|
||||
if entity_area_id != device.area_id:
|
||||
target_area_id = target_device_entry.area_id
|
||||
|
||||
if target_area_id != area_id:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@@ -3,12 +3,16 @@ beolink_allstandby:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_expand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
all_discovered:
|
||||
required: false
|
||||
@@ -33,6 +37,8 @@ beolink_join:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
@@ -65,12 +71,16 @@ beolink_leave:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_unexpand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
|
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.1.2"],
|
||||
"requirements": ["hass-nabucasa==1.1.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
|
||||
from homeassistant.const import CONF_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
trace_condition_function,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol):
|
||||
class DeviceCondition(Condition):
|
||||
"""Device condition."""
|
||||
|
||||
_hass: HomeAssistant
|
||||
_config: ConfigType
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = await async_validate_device_automation_config(
|
||||
hass,
|
||||
complete_config,
|
||||
cv.DEVICE_CONDITION_SCHEMA,
|
||||
DeviceAutomationType.CONDITION,
|
||||
)
|
||||
# Since we don't want to migrate device conditions to a new format
|
||||
# we just pass the entire config as options.
|
||||
complete_config[CONF_OPTIONS] = complete_config.copy()
|
||||
return complete_config
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize condition."""
|
||||
self._config = config
|
||||
self._hass = hass
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config.
|
||||
|
||||
This is here just to satisfy the abstract class interface. It is never called.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
self._hass = hass
|
||||
assert config.options is not None
|
||||
self._config = config.options
|
||||
"""Validate device condition config."""
|
||||
return await async_validate_device_automation_config(
|
||||
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
|
||||
)
|
||||
|
||||
async def async_get_checker(self) -> condition.ConditionCheckerType:
|
||||
"""Test a device condition."""
|
||||
|
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==41.10.0",
|
||||
"aioesphomeapi==41.11.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.3.0"
|
||||
],
|
||||
|
@@ -26,11 +26,14 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
|
||||
super().__init__(coordinator)
|
||||
self._serial = serial
|
||||
self._camera_name = self.data["name"]
|
||||
|
||||
connections = set()
|
||||
if mac_address := self.data["mac_address"]:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac_address))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, serial)},
|
||||
connections={
|
||||
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
|
||||
},
|
||||
connections=connections,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=self.data["device_sub_category"],
|
||||
name=self.data["name"],
|
||||
@@ -62,11 +65,14 @@ class EzvizBaseEntity(Entity):
|
||||
self._serial = serial
|
||||
self.coordinator = coordinator
|
||||
self._camera_name = self.data["name"]
|
||||
|
||||
connections = set()
|
||||
if mac_address := self.data["mac_address"]:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac_address))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, serial)},
|
||||
connections={
|
||||
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
|
||||
},
|
||||
connections=connections,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=self.data["device_sub_category"],
|
||||
name=self.data["name"],
|
||||
|
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250925.1"]
|
||||
"requirements": ["home-assistant-frontend==20250926.0"]
|
||||
}
|
||||
|
@@ -1,10 +1,8 @@
|
||||
load_url:
|
||||
target:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
url:
|
||||
example: "https://home-assistant.io"
|
||||
required: true
|
||||
@@ -12,12 +10,10 @@ load_url:
|
||||
text:
|
||||
|
||||
set_config:
|
||||
target:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
key:
|
||||
example: "motionSensitivity"
|
||||
required: true
|
||||
@@ -30,14 +26,12 @@ set_config:
|
||||
text:
|
||||
|
||||
start_application:
|
||||
target:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
fields:
|
||||
application:
|
||||
example: "de.ozerov.fully"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
|
@@ -147,10 +147,6 @@
|
||||
"name": "Load URL",
|
||||
"description": "Loads a URL on Fully Kiosk Browser.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"name": "Device ID",
|
||||
"description": "The target device for this action."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "URL to load."
|
||||
@@ -161,10 +157,6 @@
|
||||
"name": "Set configuration",
|
||||
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
|
||||
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
|
||||
},
|
||||
"key": {
|
||||
"name": "Key",
|
||||
"description": "Configuration parameter to set."
|
||||
@@ -182,10 +174,6 @@
|
||||
"application": {
|
||||
"name": "Application",
|
||||
"description": "Package name of the application to start."
|
||||
},
|
||||
"device_id": {
|
||||
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
|
||||
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update an entity's state data."""
|
||||
if "_state" in self._device.data: # only via v3 API
|
||||
self._last_comms = dt_util.utc_from_timestamp(
|
||||
self._device.data["_state"]["lastComms"]
|
||||
)
|
||||
if (state := self._device.data.get("_state")) and (
|
||||
last_comms := state.get("lastComms")
|
||||
) is not None: # only via v3 API
|
||||
self._last_comms = dt_util.utc_from_timestamp(last_comms)
|
||||
|
||||
|
||||
class GeniusZone(GeniusEntity):
|
||||
|
@@ -1,5 +1,7 @@
|
||||
set_vacation:
|
||||
target:
|
||||
device:
|
||||
integration: google_mail
|
||||
entity:
|
||||
integration: google_mail
|
||||
fields:
|
||||
|
@@ -22,6 +22,6 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.20.0"],
|
||||
"requirements": ["aiohomeconnect==0.19.0"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
@@ -32,12 +32,15 @@ set_location:
|
||||
stop:
|
||||
toggle:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
turn_on:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
turn_off:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
update_entity:
|
||||
fields:
|
||||
@@ -50,6 +53,8 @@ update_entity:
|
||||
reload_custom_templates:
|
||||
reload_config_entry:
|
||||
target:
|
||||
entity: {}
|
||||
device: {}
|
||||
fields:
|
||||
entry_id:
|
||||
advanced: true
|
||||
|
@@ -27,12 +27,6 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -75,10 +69,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -133,21 +129,14 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"flow_title": "{model}",
|
||||
"step": {
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"pick_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
@@ -169,10 +158,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -224,10 +215,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
@@ -23,16 +23,12 @@
|
||||
"description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration."
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "Configuring Thread"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "Updating adapter"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "Updating adapter"
|
||||
"title": "Installing OpenThread Border Router add-on",
|
||||
"description": "The OpenThread Border Router (OTBR) add-on is being installed."
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "Configuring Thread"
|
||||
"title": "Starting OpenThread Border Router add-on",
|
||||
"description": "The OpenThread Border Router (OTBR) add-on is now starting."
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "Failed to set up OpenThread Border Router",
|
||||
@@ -76,9 +72,7 @@
|
||||
"fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
|
||||
},
|
||||
"progress": {
|
||||
"install_firmware": "Installing {firmware_name} firmware. Do not make any changes to your hardware or software until this finishes.",
|
||||
"install_otbr_addon": "Installing add-on",
|
||||
"start_otbr_addon": "Starting add-on"
|
||||
"install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -27,12 +27,6 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -75,10 +69,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -133,10 +129,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
@@ -163,16 +158,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -224,10 +215,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
@@ -35,12 +35,6 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -98,10 +92,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -158,10 +154,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
@@ -142,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
)
|
||||
|
||||
coordinators = LaMarzoccoRuntimeData(
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
|
||||
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
|
||||
|
@@ -8,7 +8,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pylamarzocco import LaMarzoccoMachine
|
||||
from pylamarzocco import LaMarzoccoCloudClient, LaMarzoccoMachine
|
||||
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
SETTINGS_UPDATE_INTERVAL = timedelta(hours=8)
|
||||
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=30)
|
||||
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=15)
|
||||
@@ -51,6 +51,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
hass: HomeAssistant,
|
||||
entry: LaMarzoccoConfigEntry,
|
||||
device: LaMarzoccoMachine,
|
||||
cloud_client: LaMarzoccoCloudClient | None = None,
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
@@ -61,6 +62,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
update_interval=self._default_update_interval,
|
||||
)
|
||||
self.device = device
|
||||
self.cloud_client = cloud_client
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Do the data update."""
|
||||
@@ -85,11 +87,17 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Class to handle fetching data from the La Marzocco API centrally."""
|
||||
|
||||
cloud_client: LaMarzoccoCloudClient
|
||||
|
||||
async def _internal_async_update_data(self) -> None:
|
||||
"""Fetch data from API endpoint."""
|
||||
|
||||
# ensure token stays valid; does nothing if token is still valid
|
||||
await self.cloud_client.async_get_access_token()
|
||||
|
||||
if self.device.websocket.connected:
|
||||
return
|
||||
|
||||
await self.device.get_dashboard()
|
||||
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
|
||||
|
||||
|
@@ -37,5 +37,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylamarzocco"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pylamarzocco==2.1.0"]
|
||||
"requirements": ["pylamarzocco==2.1.1"]
|
||||
}
|
||||
|
@@ -28,7 +28,7 @@ rules:
|
||||
docs-configuration-parameters:
|
||||
status: done
|
||||
comment: No options to configure
|
||||
docs-installation-parameters: done
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
|
@@ -1,5 +1,7 @@
|
||||
set_hold_time:
|
||||
target:
|
||||
device:
|
||||
integration: lyric
|
||||
entity:
|
||||
integration: lyric
|
||||
domain: climate
|
||||
|
@@ -11,7 +11,7 @@
|
||||
"_r_to_u": "City/county (R-U)",
|
||||
"_v_to_z": "City/county (V-Z)",
|
||||
"slots": "Maximum warnings per city/county",
|
||||
"headline_filter": "Blacklist regex to filter warning headlines"
|
||||
"headline_filter": "Headline blocklist"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -34,7 +34,7 @@
|
||||
"_v_to_z": "[%key:component::nina::config::step::user::data::_v_to_z%]",
|
||||
"slots": "[%key:component::nina::config::step::user::data::slots%]",
|
||||
"headline_filter": "[%key:component::nina::config::step::user::data::headline_filter%]",
|
||||
"area_filter": "Whitelist regex to filter warnings based on affected areas"
|
||||
"area_filter": "Affected area filter"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from pyportainer import Portainer
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST, Platform
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_VERIFY_SSL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
@@ -19,11 +19,12 @@ type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
|
||||
"""Set up Portainer from a config entry."""
|
||||
|
||||
session = async_create_clientsession(hass)
|
||||
client = Portainer(
|
||||
api_url=entry.data[CONF_HOST],
|
||||
api_key=entry.data[CONF_API_KEY],
|
||||
session=session,
|
||||
session=async_create_clientsession(
|
||||
hass=hass, verify_ssl=entry.data[CONF_VERIFY_SSL]
|
||||
),
|
||||
)
|
||||
|
||||
coordinator = PortainerCoordinator(hass, entry, client)
|
||||
|
@@ -14,7 +14,7 @@ from pyportainer import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_VERIFY_SSL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -26,6 +26,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -36,7 +37,7 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
client = Portainer(
|
||||
api_url=data[CONF_HOST],
|
||||
api_key=data[CONF_API_KEY],
|
||||
session=async_get_clientsession(hass),
|
||||
session=async_get_clientsession(hass=hass, verify_ssl=data[CONF_VERIFY_SSL]),
|
||||
)
|
||||
try:
|
||||
await client.get_endpoints()
|
||||
|
@@ -4,11 +4,13 @@
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The host/URL, including the port, of your Portainer instance",
|
||||
"api_key": "The API key for authenticating with Portainer"
|
||||
"api_key": "The API key for authenticating with Portainer",
|
||||
"verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
|
||||
},
|
||||
"description": "You can create an API key in the Portainer UI. Go to **My account > API keys** and select **Add API key**"
|
||||
}
|
||||
|
@@ -610,7 +610,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
|
||||
|
||||
def _play_media_queue(
|
||||
self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue
|
||||
) -> None:
|
||||
):
|
||||
"""Manage adding, replacing, playing items onto the sonos queue."""
|
||||
_LOGGER.debug(
|
||||
"_play_media_queue item_id [%s] title [%s] enqueue [%s]",
|
||||
@@ -639,7 +639,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
|
||||
media_type: MediaType | str,
|
||||
media_id: str,
|
||||
enqueue: MediaPlayerEnqueue,
|
||||
) -> None:
|
||||
):
|
||||
"""Play a directory from a music library share."""
|
||||
item = media_browser.get_media(self.media.library, media_id, media_type)
|
||||
if not item:
|
||||
@@ -660,7 +660,6 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
|
||||
enqueue: MediaPlayerEnqueue,
|
||||
title: str,
|
||||
) -> None:
|
||||
"""Play a sharelink."""
|
||||
share_link = self.coordinator.share_link
|
||||
kwargs = {}
|
||||
if title:
|
||||
|
@@ -24,9 +24,8 @@ restore:
|
||||
|
||||
set_sleep_timer:
|
||||
target:
|
||||
entity:
|
||||
device:
|
||||
integration: sonos
|
||||
domain: media_player
|
||||
fields:
|
||||
sleep_time:
|
||||
selector:
|
||||
@@ -37,15 +36,13 @@ set_sleep_timer:
|
||||
|
||||
clear_sleep_timer:
|
||||
target:
|
||||
entity:
|
||||
device:
|
||||
integration: sonos
|
||||
domain: media_player
|
||||
|
||||
play_queue:
|
||||
target:
|
||||
entity:
|
||||
device:
|
||||
integration: sonos
|
||||
domain: media_player
|
||||
fields:
|
||||
queue_position:
|
||||
selector:
|
||||
@@ -56,9 +53,8 @@ play_queue:
|
||||
|
||||
remove_from_queue:
|
||||
target:
|
||||
entity:
|
||||
device:
|
||||
integration: sonos
|
||||
domain: media_player
|
||||
fields:
|
||||
queue_position:
|
||||
selector:
|
||||
@@ -75,9 +71,8 @@ get_queue:
|
||||
|
||||
update_alarm:
|
||||
target:
|
||||
entity:
|
||||
device:
|
||||
integration: sonos
|
||||
domain: media_player
|
||||
fields:
|
||||
alarm_id:
|
||||
required: true
|
||||
|
@@ -1,6 +1,5 @@
|
||||
"""The Squeezebox integration."""
|
||||
|
||||
import asyncio
|
||||
from asyncio import timeout
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
@@ -32,11 +31,11 @@ from homeassistant.helpers.device_registry import (
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .const import (
|
||||
CONF_HTTPS,
|
||||
DISCOVERY_INTERVAL,
|
||||
DISCOVERY_TASK,
|
||||
DOMAIN,
|
||||
SERVER_MANUFACTURER,
|
||||
SERVER_MODEL,
|
||||
@@ -65,8 +64,6 @@ PLATFORMS = [
|
||||
Platform.UPDATE,
|
||||
]
|
||||
|
||||
SQUEEZEBOX_HASS_DATA: HassKey[asyncio.Task] = HassKey(DOMAIN)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SqueezeboxData:
|
||||
@@ -243,7 +240,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry)
|
||||
current_entries = hass.config_entries.async_entries(DOMAIN)
|
||||
if len(current_entries) == 1 and current_entries[0] == entry:
|
||||
_LOGGER.debug("Stopping server discovery task")
|
||||
hass.data[SQUEEZEBOX_HASS_DATA].cancel()
|
||||
hass.data.pop(SQUEEZEBOX_HASS_DATA)
|
||||
hass.data[DOMAIN][DISCOVERY_TASK].cancel()
|
||||
hass.data[DOMAIN].pop(DISCOVERY_TASK)
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
@@ -1,6 +1,7 @@
|
||||
"""Constants for the Squeezebox component."""
|
||||
|
||||
CONF_HTTPS = "https"
|
||||
DISCOVERY_TASK = "discovery_task"
|
||||
DOMAIN = "squeezebox"
|
||||
DEFAULT_PORT = 9000
|
||||
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"
|
||||
|
@@ -44,7 +44,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.start import async_at_start
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import SQUEEZEBOX_HASS_DATA
|
||||
from .browse_media import (
|
||||
BrowseData,
|
||||
build_item_response,
|
||||
@@ -59,6 +58,7 @@ from .const import (
|
||||
CONF_VOLUME_STEP,
|
||||
DEFAULT_BROWSE_LIMIT,
|
||||
DEFAULT_VOLUME_STEP,
|
||||
DISCOVERY_TASK,
|
||||
DOMAIN,
|
||||
SERVER_MANUFACTURER,
|
||||
SERVER_MODEL,
|
||||
@@ -110,10 +110,12 @@ async def start_server_discovery(hass: HomeAssistant) -> None:
|
||||
},
|
||||
)
|
||||
|
||||
if not hass.data.get(SQUEEZEBOX_HASS_DATA):
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
if DISCOVERY_TASK not in hass.data[DOMAIN]:
|
||||
_LOGGER.debug("Adding server discovery task for squeezebox")
|
||||
hass.data[SQUEEZEBOX_HASS_DATA] = hass.async_create_background_task(
|
||||
async_discover(_discovered_server), name="squeezebox server discovery"
|
||||
hass.data[DOMAIN][DISCOVERY_TASK] = hass.async_create_background_task(
|
||||
async_discover(_discovered_server),
|
||||
name="squeezebox server discovery",
|
||||
)
|
||||
|
||||
|
||||
|
@@ -3,18 +3,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, cast
|
||||
from typing import cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
|
||||
from homeassistant.const import CONF_CONDITION, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
condition_trace_set_result,
|
||||
condition_trace_update_result,
|
||||
trace_condition_function,
|
||||
@@ -23,22 +21,20 @@ from homeassistant.helpers.sun import get_astral_event_date
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
|
||||
vol.Optional("before"): cv.sun_event,
|
||||
vol.Optional("before_offset"): cv.time_period,
|
||||
vol.Optional("after"): vol.All(
|
||||
vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
|
||||
_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
**cv.CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "sun",
|
||||
vol.Optional("before"): cv.sun_event,
|
||||
vol.Optional("before_offset"): cv.time_period,
|
||||
vol.Optional("after"): vol.All(
|
||||
vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
|
||||
),
|
||||
vol.Optional("after_offset"): cv.time_period,
|
||||
}
|
||||
),
|
||||
vol.Optional("after_offset"): cv.time_period,
|
||||
}
|
||||
|
||||
_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): vol.All(
|
||||
_OPTIONS_SCHEMA_DICT,
|
||||
cv.has_at_least_one_key("before", "after"),
|
||||
)
|
||||
}
|
||||
cv.has_at_least_one_key("before", "after"),
|
||||
)
|
||||
|
||||
|
||||
@@ -129,36 +125,24 @@ def sun(
|
||||
class SunCondition(Condition):
|
||||
"""Sun condition."""
|
||||
|
||||
_options: dict[str, Any]
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = move_top_level_schema_fields_to_options(
|
||||
complete_config, _OPTIONS_SCHEMA_DICT
|
||||
)
|
||||
return await super().async_validate_complete_config(hass, complete_config)
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize condition."""
|
||||
self._config = config
|
||||
self._hass = hass
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, _CONDITION_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
assert config.options is not None
|
||||
self._options = config.options
|
||||
return _CONDITION_SCHEMA(config) # type: ignore[no-any-return]
|
||||
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
"""Wrap action method with sun based condition."""
|
||||
before = self._options.get("before")
|
||||
after = self._options.get("after")
|
||||
before_offset = self._options.get("before_offset")
|
||||
after_offset = self._options.get("after_offset")
|
||||
before = self._config.get("before")
|
||||
after = self._config.get("after")
|
||||
before_offset = self._config.get("before_offset")
|
||||
after_offset = self._config.get("after_offset")
|
||||
|
||||
@trace_condition_function
|
||||
def sun_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
|
||||
|
@@ -5,7 +5,11 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components import onboarding
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import (
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
)
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -18,14 +22,18 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
async def async_step_import(self, import_data: None) -> ConfigFlowResult:
|
||||
"""Set up by import from async_setup."""
|
||||
await self._async_handle_discovery_without_unique_id()
|
||||
await self.async_set_unique_id(
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID, raise_on_progress=False
|
||||
)
|
||||
return self.async_create_entry(title="Thread", data={})
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Set up by import from async_setup."""
|
||||
await self._async_handle_discovery_without_unique_id()
|
||||
await self.async_set_unique_id(
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID, raise_on_progress=False
|
||||
)
|
||||
return self.async_create_entry(title="Thread", data={})
|
||||
|
||||
async def async_step_zeroconf(
|
||||
|
@@ -8,5 +8,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["python-otbr-api==2.7.0", "pyroute2==0.7.5"],
|
||||
"single_config_entry": true,
|
||||
"zeroconf": ["_meshcop._udp.local."]
|
||||
}
|
||||
|
@@ -28,12 +28,11 @@ async def async_setup_entry(
|
||||
known_devices: set[int] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
entities: list[UptimeRobotBinarySensor] = []
|
||||
for monitor in coordinator.data:
|
||||
if monitor.id in known_devices:
|
||||
continue
|
||||
known_devices.add(monitor.id)
|
||||
entities.append(
|
||||
current_devices = {monitor.id for monitor in coordinator.data}
|
||||
new_devices = current_devices - known_devices
|
||||
if new_devices:
|
||||
known_devices.update(new_devices)
|
||||
async_add_entities(
|
||||
UptimeRobotBinarySensor(
|
||||
coordinator,
|
||||
BinarySensorEntityDescription(
|
||||
@@ -42,9 +41,9 @@ async def async_setup_entry(
|
||||
),
|
||||
monitor=monitor,
|
||||
)
|
||||
for monitor in coordinator.data
|
||||
if monitor.id in new_devices
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
_check_device()
|
||||
entry.async_on_unload(coordinator.async_add_listener(_check_device))
|
||||
|
@@ -141,7 +141,9 @@ class VeSyncFanHA(VeSyncBaseEntity, FanEntity):
|
||||
attr["active_time"] = self.device.state.active_time
|
||||
|
||||
if hasattr(self.device.state, "display_status"):
|
||||
attr["display_status"] = self.device.state.display_status.value
|
||||
attr["display_status"] = getattr(
|
||||
self.device.state.display_status, "value", None
|
||||
)
|
||||
|
||||
if hasattr(self.device.state, "child_lock"):
|
||||
attr["child_lock"] = self.device.state.child_lock
|
||||
|
@@ -2,16 +2,14 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_GPS_ACCURACY,
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
CONF_CONDITION,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_OPTIONS,
|
||||
CONF_ZONE,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
@@ -19,22 +17,26 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.exceptions import ConditionErrorContainer, ConditionErrorMessage
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
trace_condition_function,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
|
||||
from . import in_zone
|
||||
|
||||
_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required("zone"): cv.entity_ids,
|
||||
}
|
||||
_CONDITION_SCHEMA = vol.Schema({CONF_OPTIONS: _OPTIONS_SCHEMA_DICT})
|
||||
_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**cv.CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): "zone",
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required("zone"): cv.entity_ids,
|
||||
# To support use_trigger_value in automation
|
||||
# Deprecated 2016/04/25
|
||||
vol.Optional("event"): vol.Any("enter", "leave"),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def zone(
|
||||
@@ -93,34 +95,21 @@ def zone(
|
||||
class ZoneCondition(Condition):
|
||||
"""Zone condition."""
|
||||
|
||||
_options: dict[str, Any]
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = move_top_level_schema_fields_to_options(
|
||||
complete_config, _OPTIONS_SCHEMA_DICT
|
||||
)
|
||||
return await super().async_validate_complete_config(hass, complete_config)
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize condition."""
|
||||
self._config = config
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, _CONDITION_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
assert config.options is not None
|
||||
self._options = config.options
|
||||
return _CONDITION_SCHEMA(config) # type: ignore[no-any-return]
|
||||
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
"""Wrap action method with zone based condition."""
|
||||
entity_ids = self._options.get(CONF_ENTITY_ID, [])
|
||||
zone_entity_ids = self._options.get(CONF_ZONE, [])
|
||||
entity_ids = self._config.get(CONF_ENTITY_ID, [])
|
||||
zone_entity_ids = self._config.get(CONF_ZONE, [])
|
||||
|
||||
@trace_condition_function
|
||||
def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
|
||||
|
@@ -376,10 +376,10 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
new_addon_config = addon_config | config_updates
|
||||
|
||||
if not new_addon_config[CONF_ADDON_DEVICE]:
|
||||
new_addon_config.pop(CONF_ADDON_DEVICE)
|
||||
if not new_addon_config[CONF_ADDON_SOCKET]:
|
||||
new_addon_config.pop(CONF_ADDON_SOCKET)
|
||||
if new_addon_config.get(CONF_ADDON_DEVICE) is None:
|
||||
new_addon_config.pop(CONF_ADDON_DEVICE, None)
|
||||
if new_addon_config.get(CONF_ADDON_SOCKET) is None:
|
||||
new_addon_config.pop(CONF_ADDON_SOCKET, None)
|
||||
|
||||
if new_addon_config == addon_config:
|
||||
return
|
||||
@@ -1470,14 +1470,33 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if not is_hassio(self.hass):
|
||||
return self.async_abort(reason="not_hassio")
|
||||
|
||||
if discovery_info.zwave_home_id:
|
||||
await self.async_set_unique_id(str(discovery_info.zwave_home_id))
|
||||
self._abort_if_unique_id_configured(
|
||||
{
|
||||
CONF_USB_PATH: None,
|
||||
CONF_SOCKET_PATH: discovery_info.socket_path,
|
||||
}
|
||||
if (
|
||||
discovery_info.zwave_home_id
|
||||
and (
|
||||
current_config_entries := self._async_current_entries(
|
||||
include_ignore=False
|
||||
)
|
||||
)
|
||||
and (home_id := str(discovery_info.zwave_home_id))
|
||||
and (
|
||||
existing_entry := next(
|
||||
(
|
||||
entry
|
||||
for entry in current_config_entries
|
||||
if entry.unique_id == home_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
)
|
||||
# Only update existing entries that are configured via sockets
|
||||
and existing_entry.data.get(CONF_SOCKET_PATH)
|
||||
):
|
||||
await self._async_set_addon_config(
|
||||
{CONF_ADDON_SOCKET: discovery_info.socket_path}
|
||||
)
|
||||
# Reloading will sync add-on options to config entry data
|
||||
self.hass.config_entries.async_schedule_reload(existing_entry.entry_id)
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
self.socket_path = discovery_info.socket_path
|
||||
self.context["title_placeholders"] = {
|
||||
|
@@ -21,7 +21,6 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
@@ -29,6 +28,7 @@ from homeassistant.helpers.trigger import (
|
||||
TriggerConfig,
|
||||
TriggerData,
|
||||
TriggerInfo,
|
||||
move_top_level_schema_fields_to_options,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
|
@@ -20,13 +20,13 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
TriggerActionType,
|
||||
TriggerConfig,
|
||||
TriggerInfo,
|
||||
move_top_level_schema_fields_to_options,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
|
@@ -25,8 +25,8 @@ if TYPE_CHECKING:
|
||||
|
||||
APPLICATION_NAME: Final = "HomeAssistant"
|
||||
MAJOR_VERSION: Final = 2025
|
||||
MINOR_VERSION: Final = 11
|
||||
PATCH_VERSION: Final = "0.dev0"
|
||||
MINOR_VERSION: Final = 10
|
||||
PATCH_VERSION: Final = "0b4"
|
||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)
|
||||
|
@@ -6807,7 +6807,8 @@
|
||||
"name": "Thread",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
"iot_class": "local_polling",
|
||||
"single_config_entry": true
|
||||
},
|
||||
"tibber": {
|
||||
"name": "Tibber",
|
||||
|
@@ -1,13 +1,5 @@
|
||||
"""Helpers for automation."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS
|
||||
|
||||
from .typing import ConfigType
|
||||
|
||||
|
||||
def get_absolute_description_key(domain: str, key: str) -> str:
|
||||
"""Return the absolute description key."""
|
||||
@@ -27,26 +19,3 @@ def get_relative_description_key(domain: str, key: str) -> str:
|
||||
if not subtype:
|
||||
return "_"
|
||||
return subtype[0]
|
||||
|
||||
|
||||
def move_top_level_schema_fields_to_options(
|
||||
config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
|
||||
) -> ConfigType:
|
||||
"""Move top-level fields to options.
|
||||
|
||||
This function is used to help migrating old-style configs to new-style configs.
|
||||
If options is already present, the config is returned as-is.
|
||||
"""
|
||||
if CONF_OPTIONS in config:
|
||||
return config
|
||||
|
||||
config = config.copy()
|
||||
options = config.setdefault(CONF_OPTIONS, {})
|
||||
|
||||
# Move top-level fields to options
|
||||
for key_marked in options_schema_dict:
|
||||
key = key_marked.schema
|
||||
if key in config:
|
||||
options[key] = config.pop(key)
|
||||
|
||||
return config
|
||||
|
@@ -6,7 +6,6 @@ import abc
|
||||
from collections import deque
|
||||
from collections.abc import Callable, Container, Coroutine, Generator, Iterable
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, time as dt_time, timedelta
|
||||
import functools as ft
|
||||
import inspect
|
||||
@@ -31,10 +30,8 @@ from homeassistant.const import (
|
||||
CONF_FOR,
|
||||
CONF_ID,
|
||||
CONF_MATCH,
|
||||
CONF_OPTIONS,
|
||||
CONF_SELECTOR,
|
||||
CONF_STATE,
|
||||
CONF_TARGET,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
CONF_WEEKDAY,
|
||||
ENTITY_MATCH_ALL,
|
||||
@@ -114,17 +111,17 @@ CONDITIONS: HassKey[dict[str, str]] = HassKey("conditions")
|
||||
|
||||
# Basic schemas to sanity check the condition descriptions,
|
||||
# full validation is done by hassfest.conditions
|
||||
_FIELD_DESCRIPTION_SCHEMA = vol.Schema(
|
||||
_FIELD_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_SELECTOR): selector.validate_selector,
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
_CONDITION_DESCRIPTION_SCHEMA = vol.Schema(
|
||||
_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional("target"): TargetSelector.CONFIG_SCHEMA,
|
||||
vol.Optional("fields"): vol.Schema({str: _FIELD_DESCRIPTION_SCHEMA}),
|
||||
vol.Optional("fields"): vol.Schema({str: _FIELD_SCHEMA}),
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
@@ -137,10 +134,10 @@ def starts_with_dot(key: str) -> str:
|
||||
return key
|
||||
|
||||
|
||||
_CONDITIONS_DESCRIPTION_SCHEMA = vol.Schema(
|
||||
_CONDITIONS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Remove(vol.All(str, starts_with_dot)): object,
|
||||
cv.underscore_slug: vol.Any(None, _CONDITION_DESCRIPTION_SCHEMA),
|
||||
cv.underscore_slug: vol.Any(None, _CONDITION_SCHEMA),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -202,43 +199,11 @@ async def _register_condition_platform(
|
||||
_LOGGER.exception("Error while notifying condition platform listener")
|
||||
|
||||
|
||||
_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
**cv.CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_CONDITION): str,
|
||||
vol.Optional(CONF_OPTIONS): object,
|
||||
vol.Optional(CONF_TARGET): cv.TARGET_FIELDS,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class Condition(abc.ABC):
|
||||
"""Condition class."""
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config.
|
||||
|
||||
The complete config includes fields that are generic to all conditions,
|
||||
such as the alias.
|
||||
This method should be overridden by conditions that need to migrate
|
||||
from the old-style config.
|
||||
"""
|
||||
complete_config = _CONDITION_SCHEMA(complete_config)
|
||||
|
||||
specific_config: ConfigType = {}
|
||||
for key in (CONF_OPTIONS, CONF_TARGET):
|
||||
if key in complete_config:
|
||||
specific_config[key] = complete_config.pop(key)
|
||||
specific_config = await cls.async_validate_config(hass, specific_config)
|
||||
|
||||
for key in (CONF_OPTIONS, CONF_TARGET):
|
||||
if key in specific_config:
|
||||
complete_config[key] = specific_config[key]
|
||||
|
||||
return complete_config
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize condition."""
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
@@ -247,9 +212,6 @@ class Condition(abc.ABC):
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
"""Get the condition checker."""
|
||||
@@ -264,14 +226,6 @@ class ConditionProtocol(Protocol):
|
||||
"""Return the conditions provided by this integration."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class ConditionConfig:
|
||||
"""Condition config."""
|
||||
|
||||
options: dict[str, Any] | None = None
|
||||
target: dict[str, Any] | None = None
|
||||
|
||||
|
||||
type ConditionCheckerType = Callable[[HomeAssistant, TemplateVarsType], bool | None]
|
||||
|
||||
|
||||
@@ -401,15 +355,8 @@ async def async_from_config(
|
||||
relative_condition_key = get_relative_description_key(
|
||||
platform_domain, condition_key
|
||||
)
|
||||
condition_cls = condition_descriptors[relative_condition_key]
|
||||
condition = condition_cls(
|
||||
hass,
|
||||
ConditionConfig(
|
||||
options=config.get(CONF_OPTIONS),
|
||||
target=config.get(CONF_TARGET),
|
||||
),
|
||||
)
|
||||
return await condition.async_get_checker()
|
||||
condition_instance = condition_descriptors[relative_condition_key](hass, config)
|
||||
return await condition_instance.async_get_checker()
|
||||
|
||||
for fmt in (ASYNC_FROM_CONFIG_FORMAT, FROM_CONFIG_FORMAT):
|
||||
factory = getattr(sys.modules[__name__], fmt.format(condition_key), None)
|
||||
@@ -1042,9 +989,9 @@ async def async_validate_condition_config(
|
||||
)
|
||||
if not (condition_class := condition_descriptors.get(relative_condition_key)):
|
||||
raise vol.Invalid(f"Invalid condition '{condition_key}' specified")
|
||||
return await condition_class.async_validate_complete_config(hass, config)
|
||||
return await condition_class.async_validate_config(hass, config)
|
||||
|
||||
if condition_key in ("numeric_state", "state"):
|
||||
if platform is None and condition_key in ("numeric_state", "state"):
|
||||
validator = cast(
|
||||
Callable[[HomeAssistant, ConfigType], ConfigType],
|
||||
getattr(
|
||||
@@ -1164,7 +1111,7 @@ def _load_conditions_file(integration: Integration) -> dict[str, Any]:
|
||||
try:
|
||||
return cast(
|
||||
dict[str, Any],
|
||||
_CONDITIONS_DESCRIPTION_SCHEMA(
|
||||
_CONDITIONS_SCHEMA(
|
||||
load_yaml_dict(str(integration.file_path / "conditions.yaml"))
|
||||
),
|
||||
)
|
||||
|
@@ -1545,6 +1545,9 @@ STATE_CONDITION_BASE_SCHEMA = {
|
||||
),
|
||||
vol.Optional(CONF_ATTRIBUTE): str,
|
||||
vol.Optional(CONF_FOR): positive_time_period_template,
|
||||
# To support use_trigger_value in automation
|
||||
# Deprecated 2016/04/25
|
||||
vol.Optional("from"): str,
|
||||
}
|
||||
|
||||
STATE_CONDITION_STATE_SCHEMA = vol.Schema(
|
||||
|
@@ -401,6 +401,29 @@ class PluggableAction:
|
||||
await task
|
||||
|
||||
|
||||
def move_top_level_schema_fields_to_options(
|
||||
config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
|
||||
) -> ConfigType:
|
||||
"""Move top-level fields to options.
|
||||
|
||||
This function is used to help migrating old-style configs to new-style configs.
|
||||
If options is already present, the config is returned as-is.
|
||||
"""
|
||||
if CONF_OPTIONS in config:
|
||||
return config
|
||||
|
||||
config = config.copy()
|
||||
options = config.setdefault(CONF_OPTIONS, {})
|
||||
|
||||
# Move top-level fields to options
|
||||
for key_marked in options_schema_dict:
|
||||
key = key_marked.schema
|
||||
if key in config:
|
||||
options[key] = config.pop(key)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
async def _async_get_trigger_platform(
|
||||
hass: HomeAssistant, trigger_key: str
|
||||
) -> tuple[str, TriggerProtocol]:
|
||||
|
@@ -36,10 +36,10 @@ fnv-hash-fast==1.5.0
|
||||
go2rtc-client==0.2.1
|
||||
ha-ffmpeg==3.2.2
|
||||
habluetooth==5.6.4
|
||||
hass-nabucasa==1.1.2
|
||||
hass-nabucasa==1.1.1
|
||||
hassil==3.2.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20250925.1
|
||||
home-assistant-frontend==20250926.0
|
||||
home-assistant-intents==2025.9.24
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2025.11.0.dev0"
|
||||
version = "2025.10.0b4"
|
||||
license = "Apache-2.0"
|
||||
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
@@ -47,7 +47,7 @@ dependencies = [
|
||||
"fnv-hash-fast==1.5.0",
|
||||
# hass-nabucasa is imported by helpers which don't depend on the cloud
|
||||
# integration
|
||||
"hass-nabucasa==1.1.2",
|
||||
"hass-nabucasa==1.1.1",
|
||||
# When bumping httpx, please check the version pins of
|
||||
# httpcore, anyio, and h11 in gen_requirements_all
|
||||
"httpx==0.28.1",
|
||||
|
2
requirements.txt
generated
2
requirements.txt
generated
@@ -22,7 +22,7 @@ certifi>=2021.5.30
|
||||
ciso8601==2.3.3
|
||||
cronsim==2.6
|
||||
fnv-hash-fast==1.5.0
|
||||
hass-nabucasa==1.1.2
|
||||
hass-nabucasa==1.1.1
|
||||
httpx==0.28.1
|
||||
home-assistant-bluetooth==1.13.1
|
||||
ifaddr==0.2.0
|
||||
|
10
requirements_all.txt
generated
10
requirements_all.txt
generated
@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
|
||||
aioemonitor==1.0.5
|
||||
|
||||
# homeassistant.components.esphome
|
||||
aioesphomeapi==41.10.0
|
||||
aioesphomeapi==41.11.0
|
||||
|
||||
# homeassistant.components.flo
|
||||
aioflo==2021.11.0
|
||||
@@ -268,7 +268,7 @@ aioharmony==0.5.3
|
||||
aiohasupervisor==0.3.3b0
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
aiohomeconnect==0.20.0
|
||||
aiohomeconnect==0.19.0
|
||||
|
||||
# homeassistant.components.homekit_controller
|
||||
aiohomekit==3.2.18
|
||||
@@ -1145,7 +1145,7 @@ habiticalib==0.4.5
|
||||
habluetooth==5.6.4
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==1.1.2
|
||||
hass-nabucasa==1.1.1
|
||||
|
||||
# homeassistant.components.splunk
|
||||
hass-splunk==0.1.1
|
||||
@@ -1186,7 +1186,7 @@ hole==0.9.0
|
||||
holidays==0.81
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20250925.1
|
||||
home-assistant-frontend==20250926.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.9.24
|
||||
@@ -2132,7 +2132,7 @@ pykwb==0.0.8
|
||||
pylacrosse==0.4
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.1.0
|
||||
pylamarzocco==2.1.1
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
|
10
requirements_test_all.txt
generated
10
requirements_test_all.txt
generated
@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
|
||||
aioemonitor==1.0.5
|
||||
|
||||
# homeassistant.components.esphome
|
||||
aioesphomeapi==41.10.0
|
||||
aioesphomeapi==41.11.0
|
||||
|
||||
# homeassistant.components.flo
|
||||
aioflo==2021.11.0
|
||||
@@ -253,7 +253,7 @@ aioharmony==0.5.3
|
||||
aiohasupervisor==0.3.3b0
|
||||
|
||||
# homeassistant.components.home_connect
|
||||
aiohomeconnect==0.20.0
|
||||
aiohomeconnect==0.19.0
|
||||
|
||||
# homeassistant.components.homekit_controller
|
||||
aiohomekit==3.2.18
|
||||
@@ -1006,7 +1006,7 @@ habiticalib==0.4.5
|
||||
habluetooth==5.6.4
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==1.1.2
|
||||
hass-nabucasa==1.1.1
|
||||
|
||||
# homeassistant.components.assist_satellite
|
||||
# homeassistant.components.conversation
|
||||
@@ -1035,7 +1035,7 @@ hole==0.9.0
|
||||
holidays==0.81
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20250925.1
|
||||
home-assistant-frontend==20250926.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.9.24
|
||||
@@ -1777,7 +1777,7 @@ pykrakenapi==0.1.8
|
||||
pykulersky==0.5.8
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.1.0
|
||||
pylamarzocco==2.1.1
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
|
@@ -1,7 +1,7 @@
|
||||
"""Common fixtures for the Ubiquiti airOS tests."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from airos.airos8 import AirOS8Data
|
||||
import pytest
|
||||
@@ -28,22 +28,26 @@ def mock_setup_entry() -> Generator[AsyncMock]:
|
||||
yield mock_setup_entry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_airos_class() -> Generator[MagicMock]:
|
||||
"""Fixture to mock the AirOS class itself."""
|
||||
with (
|
||||
patch("homeassistant.components.airos.AirOS8", autospec=True) as mock_class,
|
||||
patch("homeassistant.components.airos.config_flow.AirOS8", new=mock_class),
|
||||
patch("homeassistant.components.airos.coordinator.AirOS8", new=mock_class),
|
||||
):
|
||||
yield mock_class
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_airos_client(
|
||||
request: pytest.FixtureRequest, ap_fixture: AirOS8Data
|
||||
mock_airos_class: MagicMock, ap_fixture: AirOS8Data
|
||||
) -> Generator[AsyncMock]:
|
||||
"""Fixture to mock the AirOS API client."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.airos.config_flow.AirOS8", autospec=True
|
||||
) as mock_airos,
|
||||
patch("homeassistant.components.airos.coordinator.AirOS8", new=mock_airos),
|
||||
patch("homeassistant.components.airos.AirOS8", new=mock_airos),
|
||||
):
|
||||
client = mock_airos.return_value
|
||||
client.status.return_value = ap_fixture
|
||||
client.login.return_value = True
|
||||
yield client
|
||||
client = mock_airos_class.return_value
|
||||
client.status.return_value = ap_fixture
|
||||
client.login.return_value = True
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@@ -632,6 +632,10 @@
|
||||
}),
|
||||
}),
|
||||
'entry_data': dict({
|
||||
'advanced_settings': dict({
|
||||
'ssl': True,
|
||||
'verify_ssl': False,
|
||||
}),
|
||||
'host': '**REDACTED**',
|
||||
'password': '**REDACTED**',
|
||||
'username': 'ubnt',
|
||||
|
@@ -10,9 +10,15 @@ from airos.exceptions import (
|
||||
)
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airos.const import DOMAIN
|
||||
from homeassistant.components.airos.const import DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from homeassistant.config_entries import SOURCE_USER
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
@@ -22,6 +28,10 @@ MOCK_CONFIG = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: True,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -33,7 +43,8 @@ async def test_form_creates_entry(
|
||||
) -> None:
|
||||
"""Test we get the form and create the appropriate entry."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
|
169
tests/components/airos/test_init.py
Normal file
169
tests/components/airos/test_init.py
Normal file
@@ -0,0 +1,169 @@
|
||||
"""Test for airOS integration setup."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import ANY, MagicMock
|
||||
|
||||
from homeassistant.components.airos.const import (
|
||||
DEFAULT_SSL,
|
||||
DEFAULT_VERIFY_SSL,
|
||||
DOMAIN,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
MOCK_CONFIG_V1 = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
}
|
||||
|
||||
MOCK_CONFIG_PLAIN = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: False,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
MOCK_CONFIG_V1_2 = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: DEFAULT_SSL,
|
||||
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
async def test_setup_entry_with_default_ssl(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_airos_client: MagicMock,
|
||||
mock_airos_class: MagicMock,
|
||||
) -> None:
|
||||
"""Test setting up a config entry with default SSL options."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
mock_airos_class.assert_called_once_with(
|
||||
host=mock_config_entry.data[CONF_HOST],
|
||||
username=mock_config_entry.data[CONF_USERNAME],
|
||||
password=mock_config_entry.data[CONF_PASSWORD],
|
||||
session=ANY,
|
||||
use_ssl=DEFAULT_SSL,
|
||||
)
|
||||
|
||||
assert mock_config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL] is True
|
||||
assert mock_config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL] is False
|
||||
|
||||
|
||||
async def test_setup_entry_without_ssl(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: MagicMock,
|
||||
mock_airos_class: MagicMock,
|
||||
) -> None:
|
||||
"""Test setting up a config entry adjusted to plain HTTP."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data=MOCK_CONFIG_PLAIN,
|
||||
entry_id="1",
|
||||
unique_id="airos_device",
|
||||
version=1,
|
||||
minor_version=2,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
|
||||
mock_airos_class.assert_called_once_with(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=ANY,
|
||||
use_ssl=False,
|
||||
)
|
||||
|
||||
assert entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL] is False
|
||||
assert entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL] is False
|
||||
|
||||
|
||||
async def test_migrate_entry(hass: HomeAssistant, mock_airos_client: MagicMock) -> None:
|
||||
"""Test migrate entry unique id."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
source=SOURCE_USER,
|
||||
data=MOCK_CONFIG_V1,
|
||||
entry_id="1",
|
||||
unique_id="airos_device",
|
||||
version=1,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
assert entry.version == 1
|
||||
assert entry.minor_version == 2
|
||||
assert entry.data == MOCK_CONFIG_V1_2
|
||||
|
||||
|
||||
async def test_migrate_future_return(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: MagicMock,
|
||||
) -> None:
|
||||
"""Test migrate entry unique id."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
source=SOURCE_USER,
|
||||
data=MOCK_CONFIG_V1_2,
|
||||
entry_id="1",
|
||||
unique_id="airos_device",
|
||||
version=2,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.MIGRATION_ERROR
|
||||
|
||||
|
||||
async def test_load_unload_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test setup and unload config entry."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
assert await hass.config_entries.async_unload(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
|
@@ -123,8 +123,6 @@ async def test_dynamic_device(
|
||||
assert (state := hass.states.get(entity_id_1))
|
||||
assert state.state == STATE_ON
|
||||
|
||||
assert not hass.states.get(entity_id_2)
|
||||
|
||||
mock_amazon_devices_client.get_devices_data.return_value = {
|
||||
TEST_DEVICE_1_SN: TEST_DEVICE_1,
|
||||
TEST_DEVICE_2_SN: TEST_DEVICE_2,
|
||||
|
@@ -1797,6 +1797,7 @@ async def test_chat_log_tts_streaming(
|
||||
assert process_events(events) == snapshot
|
||||
|
||||
|
||||
@pytest.mark.parametrize(("use_satellite_entity"), [True, False])
|
||||
async def test_acknowledge(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
@@ -1805,6 +1806,7 @@ async def test_acknowledge(
|
||||
entity_registry: er.EntityRegistry,
|
||||
area_registry: ar.AreaRegistry,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
use_satellite_entity: bool,
|
||||
) -> None:
|
||||
"""Test that acknowledge sound is played when targets are in the same area."""
|
||||
area_1 = area_registry.async_get_or_create("area_1")
|
||||
@@ -1819,12 +1821,16 @@ async def test_acknowledge(
|
||||
|
||||
entry = MockConfigEntry()
|
||||
entry.add_to_hass(hass)
|
||||
satellite = device_registry.async_get_or_create(
|
||||
|
||||
satellite = entity_registry.async_get_or_create("assist_satellite", "test", "1234")
|
||||
entity_registry.async_update_entity(satellite.entity_id, area_id=area_1.id)
|
||||
|
||||
satellite_device = device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
connections=set(),
|
||||
identifiers={("demo", "id-1234")},
|
||||
)
|
||||
device_registry.async_update_device(satellite.id, area_id=area_1.id)
|
||||
device_registry.async_update_device(satellite_device.id, area_id=area_1.id)
|
||||
|
||||
events: list[assist_pipeline.PipelineEvent] = []
|
||||
turn_on = async_mock_service(hass, "light", "turn_on")
|
||||
@@ -1837,7 +1843,8 @@ async def test_acknowledge(
|
||||
pipeline_input = assist_pipeline.pipeline.PipelineInput(
|
||||
intent_input=text,
|
||||
session=mock_chat_session,
|
||||
device_id=satellite.id,
|
||||
satellite_id=satellite.entity_id if use_satellite_entity else None,
|
||||
device_id=satellite_device.id if not use_satellite_entity else None,
|
||||
run=assist_pipeline.pipeline.PipelineRun(
|
||||
hass,
|
||||
context=Context(),
|
||||
@@ -1889,7 +1896,8 @@ async def test_acknowledge(
|
||||
)
|
||||
|
||||
# 3. Remove satellite device area
|
||||
device_registry.async_update_device(satellite.id, area_id=None)
|
||||
entity_registry.async_update_entity(satellite.entity_id, area_id=None)
|
||||
device_registry.async_update_device(satellite_device.id, area_id=None)
|
||||
|
||||
_reset()
|
||||
await _run("turn on light 1")
|
||||
@@ -1900,7 +1908,8 @@ async def test_acknowledge(
|
||||
assert len(turn_on) == 1
|
||||
|
||||
# Restore
|
||||
device_registry.async_update_device(satellite.id, area_id=area_1.id)
|
||||
entity_registry.async_update_entity(satellite.entity_id, area_id=area_1.id)
|
||||
device_registry.async_update_device(satellite_device.id, area_id=area_1.id)
|
||||
|
||||
# 4. Check device area instead of entity area
|
||||
light_device = device_registry.async_get_or_create(
|
||||
|
@@ -8,13 +8,14 @@ from pyportainer.models.portainer import Endpoint
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.portainer.const import DOMAIN
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_VERIFY_SSL
|
||||
|
||||
from tests.common import MockConfigEntry, load_json_array_fixture
|
||||
|
||||
MOCK_TEST_CONFIG = {
|
||||
CONF_HOST: "https://127.0.0.1:9000/",
|
||||
CONF_API_KEY: "test_api_key",
|
||||
CONF_VERIFY_SSL: True,
|
||||
}
|
||||
|
||||
|
||||
|
@@ -83,10 +83,7 @@ async def test_if_action_before_sunrise_no_offset(
|
||||
automation.DOMAIN: {
|
||||
"id": "sun",
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"before": SUN_EVENT_SUNRISE},
|
||||
},
|
||||
"condition": {"condition": "sun", "before": SUN_EVENT_SUNRISE},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
@@ -159,10 +156,7 @@ async def test_if_action_after_sunrise_no_offset(
|
||||
automation.DOMAIN: {
|
||||
"id": "sun",
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"after": SUN_EVENT_SUNRISE},
|
||||
},
|
||||
"condition": {"condition": "sun", "after": SUN_EVENT_SUNRISE},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
@@ -237,10 +231,8 @@ async def test_if_action_before_sunrise_with_offset(
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {
|
||||
"before": SUN_EVENT_SUNRISE,
|
||||
"before_offset": "+1:00:00",
|
||||
},
|
||||
"before": SUN_EVENT_SUNRISE,
|
||||
"before_offset": "+1:00:00",
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
@@ -364,7 +356,8 @@ async def test_if_action_before_sunset_with_offset(
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"before": "sunset", "before_offset": "+1:00:00"},
|
||||
"before": "sunset",
|
||||
"before_offset": "+1:00:00",
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
@@ -488,7 +481,8 @@ async def test_if_action_after_sunrise_with_offset(
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"after": SUN_EVENT_SUNRISE, "after_offset": "+1:00:00"},
|
||||
"after": SUN_EVENT_SUNRISE,
|
||||
"after_offset": "+1:00:00",
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
@@ -636,7 +630,8 @@ async def test_if_action_after_sunset_with_offset(
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"after": "sunset", "after_offset": "+1:00:00"},
|
||||
"after": "sunset",
|
||||
"after_offset": "+1:00:00",
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
@@ -712,7 +707,8 @@ async def test_if_action_after_and_before_during(
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"after": SUN_EVENT_SUNRISE, "before": SUN_EVENT_SUNSET},
|
||||
"after": SUN_EVENT_SUNRISE,
|
||||
"before": SUN_EVENT_SUNSET,
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
@@ -816,7 +812,8 @@ async def test_if_action_before_or_after_during(
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"before": SUN_EVENT_SUNRISE, "after": SUN_EVENT_SUNSET},
|
||||
"before": SUN_EVENT_SUNRISE,
|
||||
"after": SUN_EVENT_SUNSET,
|
||||
},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
@@ -944,10 +941,7 @@ async def test_if_action_before_sunrise_no_offset_kotzebue(
|
||||
automation.DOMAIN: {
|
||||
"id": "sun",
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"before": SUN_EVENT_SUNRISE},
|
||||
},
|
||||
"condition": {"condition": "sun", "before": SUN_EVENT_SUNRISE},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
@@ -1026,10 +1020,7 @@ async def test_if_action_after_sunrise_no_offset_kotzebue(
|
||||
automation.DOMAIN: {
|
||||
"id": "sun",
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"after": SUN_EVENT_SUNRISE},
|
||||
},
|
||||
"condition": {"condition": "sun", "after": SUN_EVENT_SUNRISE},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
@@ -1108,10 +1099,7 @@ async def test_if_action_before_sunset_no_offset_kotzebue(
|
||||
automation.DOMAIN: {
|
||||
"id": "sun",
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"before": SUN_EVENT_SUNSET},
|
||||
},
|
||||
"condition": {"condition": "sun", "before": SUN_EVENT_SUNSET},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
@@ -1190,10 +1178,7 @@ async def test_if_action_after_sunset_no_offset_kotzebue(
|
||||
automation.DOMAIN: {
|
||||
"id": "sun",
|
||||
"trigger": {"platform": "event", "event_type": "test_event"},
|
||||
"condition": {
|
||||
"condition": "sun",
|
||||
"options": {"after": SUN_EVENT_SUNSET},
|
||||
},
|
||||
"condition": {"condition": "sun", "after": SUN_EVENT_SUNSET},
|
||||
"action": {"service": "test.automation"},
|
||||
}
|
||||
},
|
||||
|
@@ -3,6 +3,8 @@
|
||||
from ipaddress import ip_address
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import thread
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
@@ -56,14 +58,18 @@ async def test_import(hass: HomeAssistant) -> None:
|
||||
assert config_entry.unique_id is None
|
||||
|
||||
|
||||
async def test_import_then_zeroconf(hass: HomeAssistant) -> None:
|
||||
"""Test the import flow."""
|
||||
@pytest.mark.parametrize("source", ["import", "user"])
|
||||
async def test_single_instance_allowed_zeroconf(
|
||||
hass: HomeAssistant,
|
||||
source: str,
|
||||
) -> None:
|
||||
"""Test zeroconf single instance allowed abort reason."""
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "import"}
|
||||
thread.DOMAIN, context={"source": source}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
@@ -77,7 +83,7 @@ async def test_import_then_zeroconf(hass: HomeAssistant) -> None:
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
assert result["reason"] == "single_instance_allowed"
|
||||
assert len(mock_setup_entry.mock_calls) == 0
|
||||
|
||||
|
||||
@@ -152,8 +158,45 @@ async def test_zeroconf_setup_onboarding(hass: HomeAssistant) -> None:
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_zeroconf_then_import(hass: HomeAssistant) -> None:
|
||||
"""Test the import flow."""
|
||||
@pytest.mark.parametrize(
|
||||
("first_source", "second_source"), [("import", "user"), ("user", "import")]
|
||||
)
|
||||
async def test_import_and_user(
|
||||
hass: HomeAssistant,
|
||||
first_source: str,
|
||||
second_source: str,
|
||||
) -> None:
|
||||
"""Test single instance allowed for user and import."""
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": first_source}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": second_source}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "single_instance_allowed"
|
||||
assert len(mock_setup_entry.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("source", ["import", "user"])
|
||||
async def test_zeroconf_then_import_user(
|
||||
hass: HomeAssistant,
|
||||
source: str,
|
||||
) -> None:
|
||||
"""Test single instance allowed abort reason for import/user flow."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "zeroconf"}, data=TEST_ZEROCONF_RECORD
|
||||
)
|
||||
@@ -169,9 +212,37 @@ async def test_zeroconf_then_import(hass: HomeAssistant) -> None:
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "import"}
|
||||
thread.DOMAIN, context={"source": source}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
assert result["reason"] == "single_instance_allowed"
|
||||
assert len(mock_setup_entry.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("source", ["import", "user"])
|
||||
async def test_zeroconf_in_progress_then_import_user(
|
||||
hass: HomeAssistant,
|
||||
source: str,
|
||||
) -> None:
|
||||
"""Test priority (import/user) flow with zeroconf flow in progress."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": "zeroconf"}, data=TEST_ZEROCONF_RECORD
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "confirm"
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.thread.async_setup_entry",
|
||||
return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
thread.DOMAIN, context={"source": source}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert mock_setup_entry.call_count == 1
|
||||
|
||||
flows_in_progress = hass.config_entries.flow.async_progress()
|
||||
assert len(flows_in_progress) == 0
|
||||
|
@@ -12,7 +12,8 @@ async def test_zone_raises(hass: HomeAssistant) -> None:
|
||||
"""Test that zone raises ConditionError on errors."""
|
||||
config = {
|
||||
"condition": "zone",
|
||||
"options": {"entity_id": "device_tracker.cat", "zone": "zone.home"},
|
||||
"entity_id": "device_tracker.cat",
|
||||
"zone": "zone.home",
|
||||
}
|
||||
config = cv.CONDITION_SCHEMA(config)
|
||||
config = await condition.async_validate_condition_config(hass, config)
|
||||
@@ -65,10 +66,8 @@ async def test_zone_raises(hass: HomeAssistant) -> None:
|
||||
|
||||
config = {
|
||||
"condition": "zone",
|
||||
"options": {
|
||||
"entity_id": ["device_tracker.cat", "device_tracker.dog"],
|
||||
"zone": ["zone.home", "zone.work"],
|
||||
},
|
||||
"entity_id": ["device_tracker.cat", "device_tracker.dog"],
|
||||
"zone": ["zone.home", "zone.work"],
|
||||
}
|
||||
config = cv.CONDITION_SCHEMA(config)
|
||||
config = await condition.async_validate_condition_config(hass, config)
|
||||
@@ -103,10 +102,8 @@ async def test_zone_multiple_entities(hass: HomeAssistant) -> None:
|
||||
{
|
||||
"alias": "Zone Condition",
|
||||
"condition": "zone",
|
||||
"options": {
|
||||
"entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
|
||||
"zone": "zone.home",
|
||||
},
|
||||
"entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
|
||||
"zone": "zone.home",
|
||||
},
|
||||
],
|
||||
}
|
||||
@@ -164,10 +161,8 @@ async def test_multiple_zones(hass: HomeAssistant) -> None:
|
||||
"conditions": [
|
||||
{
|
||||
"condition": "zone",
|
||||
"options": {
|
||||
"entity_id": "device_tracker.person",
|
||||
"zone": ["zone.home", "zone.work"],
|
||||
},
|
||||
"entity_id": "device_tracker.person",
|
||||
"zone": ["zone.home", "zone.work"],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
@@ -1290,6 +1290,49 @@ async def test_esphome_discovery(
     assert len(mock_setup_entry.mock_calls) == 1


+@pytest.mark.usefixtures("supervisor", "addon_installed", "addon_info")
+async def test_esphome_discovery_already_configured(
+    hass: HomeAssistant,
+    set_addon_options: AsyncMock,
+    addon_options: dict[str, Any],
+) -> None:
+    """Test ESPHome discovery when the device is already configured."""
+    addon_options[CONF_ADDON_SOCKET] = "esphome://existing-device:6053"
+    addon_options["another_key"] = "should_not_be_touched"
+
+    entry = MockConfigEntry(
+        entry_id="mock-entry-id",
+        domain=DOMAIN,
+        data={CONF_SOCKET_PATH: "esphome://existing-device:6053"},
+        title=TITLE,
+        unique_id="1234",
+    )
+    entry.add_to_hass(hass)
+
+    with patch.object(hass.config_entries, "async_schedule_reload") as mock_reload:
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN,
+            context={"source": config_entries.SOURCE_ESPHOME},
+            data=ESPHOME_DISCOVERY_INFO,
+        )
+
+    mock_reload.assert_called_once_with(entry.entry_id)
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "already_configured"
+
+    # Addon got updated
+    assert set_addon_options.call_args == call(
+        "core_zwave_js",
+        AddonsOptions(
+            config={
+                "socket": "esphome://192.168.1.100:6053",
+                "another_key": "should_not_be_touched",
+            }
+        ),
+    )
+
+
 @pytest.mark.usefixtures("supervisor", "addon_installed")
 async def test_discovery_addon_not_running(
     hass: HomeAssistant,
@@ -1,12 +1,10 @@
 """Test automation helpers."""

 import pytest
-import voluptuous as vol

 from homeassistant.helpers.automation import (
     get_absolute_description_key,
     get_relative_description_key,
-    move_top_level_schema_fields_to_options,
 )


@@ -36,73 +34,3 @@ def test_relative_description_key(relative_key: str, absolute_key: str) -> None:
     """Test relative description key."""
     DOMAIN = "homeassistant"
     assert get_relative_description_key(DOMAIN, absolute_key) == relative_key
-
-
-@pytest.mark.parametrize(
-    ("config", "schema_dict", "expected_config"),
-    [
-        (
-            {
-                "platform": "test",
-                "entity": "sensor.test",
-                "from": "open",
-                "to": "closed",
-                "for": {"hours": 1},
-                "attribute": "state",
-                "value_template": "{{ value_json.val }}",
-                "extra_field": "extra_value",
-            },
-            {},
-            {
-                "platform": "test",
-                "entity": "sensor.test",
-                "from": "open",
-                "to": "closed",
-                "for": {"hours": 1},
-                "attribute": "state",
-                "value_template": "{{ value_json.val }}",
-                "extra_field": "extra_value",
-                "options": {},
-            },
-        ),
-        (
-            {
-                "platform": "test",
-                "entity": "sensor.test",
-                "from": "open",
-                "to": "closed",
-                "for": {"hours": 1},
-                "attribute": "state",
-                "value_template": "{{ value_json.val }}",
-                "extra_field": "extra_value",
-            },
-            {
-                vol.Required("entity"): str,
-                vol.Optional("from"): str,
-                vol.Optional("to"): str,
-                vol.Optional("for"): dict,
-                vol.Optional("attribute"): str,
-                vol.Optional("value_template"): str,
-            },
-            {
-                "platform": "test",
-                "extra_field": "extra_value",
-                "options": {
-                    "entity": "sensor.test",
-                    "from": "open",
-                    "to": "closed",
-                    "for": {"hours": 1},
-                    "attribute": "state",
-                    "value_template": "{{ value_json.val }}",
-                },
-            },
-        ),
-    ],
-)
-async def test_move_schema_fields_to_options(
-    config, schema_dict, expected_config
-) -> None:
-    """Test moving schema fields to options."""
-    assert (
-        move_top_level_schema_fields_to_options(config, schema_dict) == expected_config
-    )
@@ -32,13 +32,6 @@ from homeassistant.helpers import (
     entity_registry as er,
     trace,
 )
-from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
-from homeassistant.helpers.condition import (
-    Condition,
-    ConditionCheckerType,
-    ConditionConfig,
-    async_validate_condition_config,
-)
 from homeassistant.helpers.template import Template
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import Integration, async_get_integration
@@ -2112,9 +2105,12 @@ async def test_platform_async_get_conditions(hass: HomeAssistant) -> None:
 async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
     """Test a condition platform with multiple conditions."""

-    class MockCondition(Condition):
+    class MockCondition(condition.Condition):
         """Mock condition."""

+        def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
+            """Initialize condition."""
+
         @classmethod
         async def async_validate_config(
             cls, hass: HomeAssistant, config: ConfigType
@@ -2122,24 +2118,23 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
             """Validate config."""
             return config

-        def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
-            """Initialize condition."""
-
     class MockCondition1(MockCondition):
         """Mock condition 1."""

-        async def async_get_checker(self) -> ConditionCheckerType:
+        async def async_get_checker(self) -> condition.ConditionCheckerType:
             """Evaluate state based on configuration."""
             return lambda hass, vars: True

     class MockCondition2(MockCondition):
         """Mock condition 2."""

-        async def async_get_checker(self) -> ConditionCheckerType:
+        async def async_get_checker(self) -> condition.ConditionCheckerType:
             """Evaluate state based on configuration."""
             return lambda hass, vars: False

-    async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
+    async def async_get_conditions(
+        hass: HomeAssistant,
+    ) -> dict[str, type[condition.Condition]]:
         return {
             "_": MockCondition1,
             "cond_2": MockCondition2,
@@ -2153,12 +2148,12 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
     config_1 = {CONF_CONDITION: "test"}
     config_2 = {CONF_CONDITION: "test.cond_2"}
     config_3 = {CONF_CONDITION: "test.unknown_cond"}
-    assert await async_validate_condition_config(hass, config_1) == config_1
-    assert await async_validate_condition_config(hass, config_2) == config_2
+    assert await condition.async_validate_condition_config(hass, config_1) == config_1
+    assert await condition.async_validate_condition_config(hass, config_2) == config_2
     with pytest.raises(
         vol.Invalid, match="Invalid condition 'test.unknown_cond' specified"
     ):
-        await async_validate_condition_config(hass, config_3)
+        await condition.async_validate_condition_config(hass, config_3)

     cond_func = await condition.async_from_config(hass, config_1)
     assert cond_func(hass, {}) is True
@@ -2170,74 +2165,6 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
         await condition.async_from_config(hass, config_3)


-async def test_platform_migrate_trigger(hass: HomeAssistant) -> None:
-    """Test a condition platform with a migration."""
-
-    OPTIONS_SCHEMA_DICT = {
-        vol.Required("option_1"): str,
-        vol.Optional("option_2"): int,
-    }
-
-    class MockCondition(Condition):
-        """Mock condition."""
-
-        @classmethod
-        async def async_validate_complete_config(
-            cls, hass: HomeAssistant, complete_config: ConfigType
-        ) -> ConfigType:
-            """Validate complete config."""
-            complete_config = move_top_level_schema_fields_to_options(
-                complete_config, OPTIONS_SCHEMA_DICT
-            )
-            return await super().async_validate_complete_config(hass, complete_config)
-
-        @classmethod
-        async def async_validate_config(
-            cls, hass: HomeAssistant, config: ConfigType
-        ) -> ConfigType:
-            """Validate config."""
-            return config
-
-    async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
-        return {
-            "_": MockCondition,
-        }
-
-    mock_integration(hass, MockModule("test"))
-    mock_platform(
-        hass, "test.condition", Mock(async_get_conditions=async_get_conditions)
-    )
-
-    config_1 = {
-        "condition": "test",
-        "option_1": "value_1",
-        "option_2": 2,
-    }
-    config_2 = {
-        "condition": "test",
-        "option_1": "value_1",
-    }
-    config_1_migrated = {
-        "condition": "test",
-        "options": {"option_1": "value_1", "option_2": 2},
-    }
-    config_2_migrated = {
-        "condition": "test",
-        "options": {"option_1": "value_1"},
-    }
-
-    assert await async_validate_condition_config(hass, config_1) == config_1_migrated
-    assert await async_validate_condition_config(hass, config_2) == config_2_migrated
-    assert (
-        await async_validate_condition_config(hass, config_1_migrated)
-        == config_1_migrated
-    )
-    assert (
-        await async_validate_condition_config(hass, config_2_migrated)
-        == config_2_migrated
-    )
-
-
 @pytest.mark.parametrize("enabled_value", [True, "{{ 1 == 1 }}"])
 async def test_enabled_condition(
     hass: HomeAssistant, enabled_value: bool | str
@@ -19,7 +19,6 @@ from homeassistant.core import (
 )
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import trigger
-from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
 from homeassistant.helpers.trigger import (
     DATA_PLUGGABLE_ACTIONS,
     PluggableAction,
@@ -30,6 +29,7 @@ from homeassistant.helpers.trigger import (
     _async_get_trigger_platform,
     async_initialize_triggers,
     async_validate_trigger_config,
+    move_top_level_schema_fields_to_options,
 )
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import Integration, async_get_integration
@@ -449,6 +449,76 @@ async def test_pluggable_action(
     assert not plug_2


+@pytest.mark.parametrize(
+    ("config", "schema_dict", "expected_config"),
+    [
+        (
+            {
+                "platform": "test",
+                "entity": "sensor.test",
+                "from": "open",
+                "to": "closed",
+                "for": {"hours": 1},
+                "attribute": "state",
+                "value_template": "{{ value_json.val }}",
+                "extra_field": "extra_value",
+            },
+            {},
+            {
+                "platform": "test",
+                "entity": "sensor.test",
+                "from": "open",
+                "to": "closed",
+                "for": {"hours": 1},
+                "attribute": "state",
+                "value_template": "{{ value_json.val }}",
+                "extra_field": "extra_value",
+                "options": {},
+            },
+        ),
+        (
+            {
+                "platform": "test",
+                "entity": "sensor.test",
+                "from": "open",
+                "to": "closed",
+                "for": {"hours": 1},
+                "attribute": "state",
+                "value_template": "{{ value_json.val }}",
+                "extra_field": "extra_value",
+            },
+            {
+                vol.Required("entity"): str,
+                vol.Optional("from"): str,
+                vol.Optional("to"): str,
+                vol.Optional("for"): dict,
+                vol.Optional("attribute"): str,
+                vol.Optional("value_template"): str,
+            },
+            {
+                "platform": "test",
+                "extra_field": "extra_value",
+                "options": {
+                    "entity": "sensor.test",
+                    "from": "open",
+                    "to": "closed",
+                    "for": {"hours": 1},
+                    "attribute": "state",
+                    "value_template": "{{ value_json.val }}",
+                },
+            },
+        ),
+    ],
+)
+async def test_move_schema_fields_to_options(
+    config, schema_dict, expected_config
+) -> None:
+    """Test moving schema fields to options."""
+    assert (
+        move_top_level_schema_fields_to_options(config, schema_dict) == expected_config
+    )
+
+
 async def test_platform_multiple_triggers(hass: HomeAssistant) -> None:
     """Test a trigger platform with multiple trigger."""
