Mirror of https://github.com/home-assistant/core.git, synced 2025-09-30 23:39:38 +00:00

Compare commits: mqtt-subsc...uptimerobo (82 commits)
Commits (SHA1):

80285fd3e6
ab4086be18
de6c3512d2
36dc1e938a
07a78cf6f7
eaa673e0c3
f2c4ca081f
e3d707f0b4
fb93fed2e5
95dfc2f23d
408df2093a
f32bf0cc3e
dbbe3145b6
f8bf3ea2ef
053bd31d43
1aefc3f37a
3de955d9ce
0ff88fd366
eb84020773
4bbfea3c7c
63d4fb7558
953895cd81
a6c3f4efc0
11e880d034
e4d6bdb398
6ced1783e3
8051f78d10
b724176b23
fdca16ea92
f8fd8b432a
9148ae70ce
447cb26d28
2af36465f6
d5f7265424
cc16af7f2d
7a4d75bc44
ec0380fd3b
b17cc71dfb
89b327ed7b
9bf361a1b8
d11c171c75
c523c45d17
c1b9c0e1b6
487b9ff03e
ec62b0cdfb
6d0470064f
7450b3fd1a
5b70910d77
52de5ff5ff
c4389a1679
35faaa6cae
3c0b13975a
bc88696339
8f99c3f64a
88016d96d4
47df73b18f
1c12d2b8cd
eb38837a8c
159c7fbfd1
7ee31f0884
0c5e12571a
9db973217f
cf1a745283
834e3f1963
3f8f7573c9
0ae272f1f6
8774295e2e
0c8d2594ef
205bd2676b
25849fd9cc
7d6eac9ff7
31017ebc98
724a7b0ecc
91e13d447a
7c8ad9d535
9cd3ab853d
0b0f8c5829
ae7bc7fb1b
09750872b5
076e51017b
95e7b00996
ddecf1ac21
.github/workflows/ci.yaml (62 changes, vendored)

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.10"
+  HA_SHORT_VERSION: "2025.11"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version

@@ -263,7 +263,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: >-

@@ -279,7 +279,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true

@@ -309,7 +309,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -318,7 +318,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true

@@ -349,7 +349,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -358,7 +358,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true

@@ -389,7 +389,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -398,7 +398,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true

@@ -505,7 +505,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: >-

@@ -513,7 +513,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-

@@ -525,7 +525,7 @@ jobs:
            env.HA_SHORT_VERSION }}-
       - name: Check if apt cache exists
         id: cache-apt-check
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
           path: |

@@ -570,7 +570,7 @@ jobs:
          fi
       - name: Save apt cache
         if: steps.cache-apt-check.outputs.cache-hit != 'true'
-        uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -622,7 +622,7 @@ jobs:
       - base
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -651,7 +651,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -684,7 +684,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -741,7 +741,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -784,7 +784,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -831,7 +831,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -883,7 +883,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -891,7 +891,7 @@ jobs:
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: .mypy_cache
           key: >-

@@ -935,7 +935,7 @@ jobs:
     name: Split tests for full run
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -967,7 +967,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -1009,7 +1009,7 @@ jobs:
       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -1042,7 +1042,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -1156,7 +1156,7 @@ jobs:
       Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -1189,7 +1189,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -1310,7 +1310,7 @@ jobs:
       Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
     steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -1345,7 +1345,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true

@@ -1485,7 +1485,7 @@ jobs:
       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}

@@ -1518,7 +1518,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
.github/workflows/codeql.yml (4 changes, vendored)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/init@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/analyze@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
         with:
           category: "/language:python"
.github/workflows/wheels.yml (4 changes, vendored)

@@ -160,7 +160,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.07.0
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2

@@ -221,7 +221,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.07.0
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
build.yaml (10 changes)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.1"]
+  "requirements": ["accuweather==4.2.2"]
 }
@@ -4,10 +4,18 @@ from __future__ import annotations

 from airos.airos8 import AirOS8

-from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
+from homeassistant.const import (
+    CONF_HOST,
+    CONF_PASSWORD,
+    CONF_SSL,
+    CONF_USERNAME,
+    CONF_VERIFY_SSL,
+    Platform,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

+from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

 _PLATFORMS: list[Platform] = [

@@ -21,13 +29,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo

     # By default airOS 8 comes with self-signed SSL certificates,
     # with no option in the web UI to change or upload a custom certificate.
-    session = async_get_clientsession(hass, verify_ssl=False)
+    session = async_get_clientsession(
+        hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
+    )

     airos_device = AirOS8(
         host=entry.data[CONF_HOST],
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
         session=session,
+        use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
     )

     coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)

@@ -40,6 +51,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
     return True


+async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
+    """Migrate old config entry."""
+
+    if entry.version > 1:
+        # This means the user has downgraded from a future version
+        return False
+
+    if entry.version == 1 and entry.minor_version == 1:
+        new_data = {**entry.data}
+        advanced_data = {
+            CONF_SSL: DEFAULT_SSL,
+            CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
+        }
+        new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
+
+        hass.config_entries.async_update_entry(
+            entry,
+            data=new_data,
+            minor_version=2,
+        )
+
+    return True
+
+
 async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
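The async_migrate_entry added above gates on the entry's version tuple. A minimal standalone sketch of that gate (a hypothetical helper written for illustration, not code from the diff):

# Hypothetical illustration of the version gate in async_migrate_entry above.
def migration_action(version: int, minor_version: int) -> str:
    if version > 1:
        # Entry was written by a future major version (user downgraded): fail setup.
        return "reject"
    if version == 1 and minor_version == 1:
        # 1.1 entries gain the advanced_settings defaults and become 1.2.
        return "migrate"
    return "keep"

assert migration_action(2, 0) == "reject"
assert migration_action(1, 1) == "migrate"
assert migration_action(1, 2) == "keep"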
@@ -15,10 +15,17 @@ from airos.exceptions import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from .coordinator import AirOS8
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -28,6 +35,15 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_USERNAME, default="ubnt"): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
|
||||
vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
|
||||
}
|
||||
),
|
||||
{"collapsed": True},
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -36,6 +52,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Ubiquiti airOS."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self,
|
||||
@@ -46,13 +63,17 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
# By default airOS 8 comes with self-signed SSL certificates,
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(self.hass, verify_ssl=False)
|
||||
session = async_get_clientsession(
|
||||
self.hass,
|
||||
verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
|
||||
)
|
||||
|
||||
airos_device = AirOS8(
|
||||
host=user_input[CONF_HOST],
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=session,
|
||||
use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
|
||||
)
|
||||
try:
|
||||
await airos_device.login()
|
||||
|
@@ -7,3 +7,8 @@ DOMAIN = "airos"
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
MANUFACTURER = "Ubiquiti"
|
||||
|
||||
DEFAULT_VERIFY_SSL = False
|
||||
DEFAULT_SSL = True
|
||||
|
||||
SECTION_ADVANCED_SETTINGS = "advanced_settings"
|
||||
|
@@ -2,11 +2,11 @@

 from __future__ import annotations

-from homeassistant.const import CONF_HOST
+from homeassistant.const import CONF_HOST, CONF_SSL
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN, MANUFACTURER
+from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSDataUpdateCoordinator


@@ -20,9 +20,14 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
         super().__init__(coordinator)

         airos_data = self.coordinator.data
+        url_schema = (
+            "https"
+            if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
+            else "http"
+        )

         configuration_url: str | None = (
-            f"https://{coordinator.config_entry.data[CONF_HOST]}"
+            f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
         )

         self._attr_device_info = DeviceInfo(
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airos",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["airos==0.5.1"]
+  "requirements": ["airos==0.5.3"]
 }
@@ -12,6 +12,18 @@
         "host": "IP address or hostname of the airOS device",
         "username": "Administrator username for the airOS device, normally 'ubnt'",
         "password": "Password configured through the UISP app or web interface"
+      },
+      "sections": {
+        "advanced_settings": {
+          "data": {
+            "ssl": "Use HTTPS",
+            "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
+          },
+          "data_description": {
+            "ssl": "Whether the connection should be encrypted (required for most devices)",
+            "verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
+          }
+        }
       }
     }
   },
@@ -10,6 +10,7 @@ from aioamazondevices.api import AmazonDevice
 from aioamazondevices.const import SENSOR_STATE_OFF

 from homeassistant.components.binary_sensor import (
+    DOMAIN as BINARY_SENSOR_DOMAIN,
     BinarySensorDeviceClass,
     BinarySensorEntity,
     BinarySensorEntityDescription,

@@ -20,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
+from .utils import async_update_unique_id

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0

@@ -31,6 +33,7 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):

     is_on_fn: Callable[[AmazonDevice, str], bool]
     is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True


 BINARY_SENSORS: Final = (

@@ -41,46 +44,15 @@ BINARY_SENSORS: Final = (
         is_on_fn=lambda device, _: device.online,
     ),
-    AmazonBinarySensorEntityDescription(
-        key="bluetooth",
-        entity_category=EntityCategory.DIAGNOSTIC,
-        translation_key="bluetooth",
-        is_on_fn=lambda device, _: device.bluetooth_state,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="babyCryDetectionState",
-        translation_key="baby_cry_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="beepingApplianceDetectionState",
-        translation_key="beeping_appliance_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="coughDetectionState",
-        translation_key="cough_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="dogBarkDetectionState",
-        translation_key="dog_bark_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
     AmazonBinarySensorEntityDescription(
-        key="humanPresenceDetectionState",
+        key="detectionState",
         device_class=BinarySensorDeviceClass.MOTION,
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="waterSoundsDetectionState",
-        translation_key="water_sounds_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_on_fn=lambda device, key: bool(
+            device.sensors[key].value != SENSOR_STATE_OFF
+        ),
         is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_available_fn=lambda device, key: (
+            device.online and device.sensors[key].error is False
+        ),
     ),
 )

@@ -94,6 +66,15 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

+    # Replace unique id for "detectionState" binary sensor
+    await async_update_unique_id(
+        hass,
+        coordinator,
+        BINARY_SENSOR_DOMAIN,
+        "humanPresenceDetectionState",
+        "detectionState",
+    )
+
     known_devices: set[str] = set()

     def _check_device() -> None:

@@ -125,3 +106,13 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
         return self.entity_description.is_on_fn(
             self.device, self.entity_description.key
         )
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 data = await validate_input(self.hass, user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except (CannotAuthenticate, TypeError):
+            except CannotAuthenticate:
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"

@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except (CannotAuthenticate, TypeError):
+            except CannotAuthenticate:
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
                 translation_key="cannot_retrieve_data_with_error",
                 translation_placeholders={"error": repr(err)},
             ) from err
-        except (CannotAuthenticate, TypeError) as err:
+        except CannotAuthenticate as err:
             raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
@@ -60,7 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
|
||||
"online": device.online,
|
||||
"serial number": device.serial_number,
|
||||
"software version": device.software_version,
|
||||
"do not disturb": device.do_not_disturb,
|
||||
"response style": device.response_style,
|
||||
"bluetooth state": device.bluetooth_state,
|
||||
"sensors": device.sensors,
|
||||
}
|
||||
|
@@ -1,44 +1,4 @@
 {
-  "entity": {
-    "binary_sensor": {
-      "bluetooth": {
-        "default": "mdi:bluetooth-off",
-        "state": {
-          "on": "mdi:bluetooth"
-        }
-      },
-      "baby_cry_detection": {
-        "default": "mdi:account-voice-off",
-        "state": {
-          "on": "mdi:account-voice"
-        }
-      },
-      "beeping_appliance_detection": {
-        "default": "mdi:bell-off",
-        "state": {
-          "on": "mdi:bell-ring"
-        }
-      },
-      "cough_detection": {
-        "default": "mdi:blur-off",
-        "state": {
-          "on": "mdi:blur"
-        }
-      },
-      "dog_bark_detection": {
-        "default": "mdi:dog-side-off",
-        "state": {
-          "on": "mdi:dog-side"
-        }
-      },
-      "water_sounds_detection": {
-        "default": "mdi:water-pump-off",
-        "state": {
-          "on": "mdi:water-pump"
-        }
-      }
-    }
-  },
   "services": {
     "send_sound": {
       "service": "mdi:cast-audio"
@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
-  "quality_scale": "silver",
-  "requirements": ["aioamazondevices==6.0.0"]
+  "quality_scale": "platinum",
+  "requirements": ["aioamazondevices==6.2.6"]
 }
@@ -31,6 +31,9 @@
     """Amazon Devices sensor entity description."""

     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
+        device.online and device.sensors[key].error is False
+    )


 SENSORS: Final = (

@@ -99,3 +102,13 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
     def native_value(self) -> StateType:
         """Return the state of the sensor."""
         return self.device.sensors[self.entity_description.key].value
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
@@ -58,26 +58,6 @@
     }
   },
   "entity": {
-    "binary_sensor": {
-      "bluetooth": {
-        "name": "Bluetooth"
-      },
-      "baby_cry_detection": {
-        "name": "Baby crying"
-      },
-      "beeping_appliance_detection": {
-        "name": "Beeping appliance"
-      },
-      "cough_detection": {
-        "name": "Coughing"
-      },
-      "dog_bark_detection": {
-        "name": "Dog barking"
-      },
-      "water_sounds_detection": {
-        "name": "Water sounds"
-      }
-    },
     "notify": {
       "speak": {
         "name": "Speak"
@@ -8,13 +8,17 @@ from typing import TYPE_CHECKING, Any, Final

 from aioamazondevices.api import AmazonDevice

-from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
+from homeassistant.components.switch import (
+    DOMAIN as SWITCH_DOMAIN,
+    SwitchEntity,
+    SwitchEntityDescription,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import alexa_api_call
+from .utils import alexa_api_call, async_update_unique_id

 PARALLEL_UPDATES = 1

@@ -24,16 +28,17 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
     """Alexa Devices switch entity description."""

     is_on_fn: Callable[[AmazonDevice], bool]
-    subkey: str
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
+        device.online and device.sensors[key].error is False
+    )
     method: str


 SWITCHES: Final = (
     AmazonSwitchEntityDescription(
-        key="do_not_disturb",
-        subkey="AUDIO_PLAYER",
+        key="dnd",
         translation_key="do_not_disturb",
-        is_on_fn=lambda _device: _device.do_not_disturb,
+        is_on_fn=lambda device: bool(device.sensors["dnd"].value),
         method="set_do_not_disturb",
     ),
 )

@@ -48,6 +53,11 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

+    # Replace unique id for "DND" switch and remove from Speaker Group
+    await async_update_unique_id(
+        hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
+    )
+
     known_devices: set[str] = set()

     def _check_device() -> None:

@@ -59,7 +69,7 @@ async def async_setup_entry(
             AmazonSwitchEntity(coordinator, serial_num, switch_desc)
             for switch_desc in SWITCHES
             for serial_num in new_devices
-            if switch_desc.subkey in coordinator.data[serial_num].capabilities
+            if switch_desc.key in coordinator.data[serial_num].sensors
         )

     _check_device()

@@ -94,3 +104,13 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
     def is_on(self) -> bool:
         """Return True if switch is on."""
         return self.entity_description.is_on_fn(self.device)
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
@@ -6,9 +6,12 @@ from typing import Any, Concatenate

 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
+import homeassistant.helpers.entity_registry as er

-from .const import DOMAIN
+from .const import _LOGGER, DOMAIN
+from .coordinator import AmazonDevicesCoordinator
 from .entity import AmazonEntity


@@ -38,3 +41,23 @@ def alexa_api_call[_T: AmazonEntity, **_P](
         ) from err

     return cmd_wrapper
+
+
+async def async_update_unique_id(
+    hass: HomeAssistant,
+    coordinator: AmazonDevicesCoordinator,
+    domain: str,
+    old_key: str,
+    new_key: str,
+) -> None:
+    """Update unique id for entities created with old format."""
+    entity_registry = er.async_get(hass)
+
+    for serial_num in coordinator.data:
+        unique_id = f"{serial_num}-{old_key}"
+        if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
+            _LOGGER.debug("Updating unique_id for %s", entity_id)
+            new_unique_id = unique_id.replace(old_key, new_key)
+
+            # Update the registry with the new unique_id
+            entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
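The async_update_unique_id helper above rewrites per-device unique ids in the entity registry. A standalone sketch of the string rewrite it performs (the serial number below is made up for illustration):

# Old unique ids have the form "<serial>-<old_key>"; the helper rewrites
# them to "<serial>-<new_key>" via str.replace, as in the diff above.
def rewrite_unique_id(serial_num: str, old_key: str, new_key: str) -> str:
    unique_id = f"{serial_num}-{old_key}"
    return unique_id.replace(old_key, new_key)

assert rewrite_unique_id("ABC123", "do_not_disturb", "dnd") == "ABC123-dnd"
assert rewrite_unique_id("ABC123", "humanPresenceDetectionState", "detectionState") == "ABC123-detectionState"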
@@ -1308,7 +1308,9 @@ class PipelineRun:
             # instead of a full response.
             all_targets_in_satellite_area = (
                 self._get_all_targets_in_satellite_area(
-                    conversation_result.response, self._device_id
+                    conversation_result.response,
+                    self._satellite_id,
+                    self._device_id,
                 )
             )

@@ -1337,39 +1339,62 @@
         return (speech, all_targets_in_satellite_area)

     def _get_all_targets_in_satellite_area(
-        self, intent_response: intent.IntentResponse, device_id: str | None
+        self,
+        intent_response: intent.IntentResponse,
+        satellite_id: str | None,
+        device_id: str | None,
     ) -> bool:
         """Return true if all targeted entities were in the same area as the device."""
         if (
-            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
-            or (not intent_response.matched_states)
-            or (not device_id)
-        ):
-            return False
-
-        device_registry = dr.async_get(self.hass)
-
-        if (not (device := device_registry.async_get(device_id))) or (
-            not device.area_id
+            intent_response.response_type != intent.IntentResponseType.ACTION_DONE
+            or not intent_response.matched_states
         ):
             return False

         entity_registry = er.async_get(self.hass)
-        for state in intent_response.matched_states:
-            entity = entity_registry.async_get(state.entity_id)
-            if not entity:
-                return False
-
-            if (entity_area_id := entity.area_id) is None:
-                if (entity.device_id is None) or (
-                    (entity_device := device_registry.async_get(entity.device_id))
-                    is None
-                ):
-                    return False
-
-                entity_area_id = entity_device.area_id
-
-            if entity_area_id != device.area_id:
-                return False
+        device_registry = dr.async_get(self.hass)
+
+        area_id: str | None = None
+
+        if (
+            satellite_id is not None
+            and (target_entity_entry := entity_registry.async_get(satellite_id))
+            is not None
+        ):
+            area_id = target_entity_entry.area_id
+            device_id = target_entity_entry.device_id
+
+        if area_id is None:
+            if device_id is None:
+                return False
+
+            device_entry = device_registry.async_get(device_id)
+            if device_entry is None:
+                return False
+
+            area_id = device_entry.area_id
+            if area_id is None:
+                return False
+
+        for state in intent_response.matched_states:
+            target_entity_entry = entity_registry.async_get(state.entity_id)
+            if target_entity_entry is None:
+                return False
+
+            target_area_id = target_entity_entry.area_id
+            if target_area_id is None:
+                if target_entity_entry.device_id is None:
+                    return False
+
+                target_device_entry = device_registry.async_get(
+                    target_entity_entry.device_id
+                )
+                if target_device_entry is None:
+                    return False
+
+                target_area_id = target_device_entry.area_id
+
+            if target_area_id != area_id:
+                return False

         return True
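The reworked _get_all_targets_in_satellite_area resolves an area for each registry entry by preferring the entry's own area and falling back to the area of its device. A simplified standalone sketch of that resolution order (the data model here is assumed for illustration, not the real registry classes):

from dataclasses import dataclass


@dataclass
class Entry:
    area_id: str | None
    device_id: str | None


def resolve_area(entry: Entry, device_areas: dict[str, str | None]) -> str | None:
    # An area set directly on the entry wins; otherwise inherit the device's area.
    if entry.area_id is not None:
        return entry.area_id
    if entry.device_id is None:
        return None
    return device_areas.get(entry.device_id)


assert resolve_area(Entry("kitchen", "dev1"), {"dev1": "hall"}) == "kitchen"
assert resolve_area(Entry(None, "dev1"), {"dev1": "hall"}) == "hall"
assert resolve_area(Entry(None, None), {}) is None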
@@ -3,16 +3,12 @@ beolink_allstandby:
     entity:
       integration: bang_olufsen
       domain: media_player
-    device:
-      integration: bang_olufsen

 beolink_expand:
   target:
     entity:
       integration: bang_olufsen
       domain: media_player
-    device:
-      integration: bang_olufsen
   fields:
     all_discovered:
       required: false

@@ -37,8 +33,6 @@ beolink_join:
     entity:
       integration: bang_olufsen
       domain: media_player
-    device:
-      integration: bang_olufsen
   fields:
     jid_options:
       collapsed: false

@@ -71,16 +65,12 @@ beolink_leave:
     entity:
       integration: bang_olufsen
       domain: media_player
-    device:
-      integration: bang_olufsen

 beolink_unexpand:
   target:
     entity:
       integration: bang_olufsen
       domain: media_player
-    device:
-      integration: bang_olufsen
   fields:
     jid_options:
       collapsed: false
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.1.1"],
+  "requirements": ["hass-nabucasa==1.1.2"],
   "single_config_entry": true
 }
@@ -25,23 +25,27 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
 from .utils import async_client_session

 DEFAULT_HOST = "192.168.1.252"
-DEFAULT_PIN = 111111
+DEFAULT_PIN = "111111"
+
+pin_regex = r"^[0-9]{4,10}$"

 USER_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
         vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
         vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
     }
 )
-STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
+STEP_REAUTH_DATA_SCHEMA = vol.Schema(
+    {vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
+)
 STEP_RECONFIGURE = vol.Schema(
     {
         vol.Required(CONF_HOST): cv.string,
         vol.Required(CONF_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
     }
 )
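One plausible reason for validating the PIN as a string against a regex instead of cv.positive_int (the diff itself does not state the motivation): integer parsing drops leading zeros, while the regex keeps the digits verbatim and also bounds the length. A quick standalone check:

import re

PIN_REGEX = r"^[0-9]{4,10}$"

assert re.fullmatch(PIN_REGEX, "012345") is not None  # leading zero preserved
assert int("012345") == 12345                         # as an int it would be lost
assert re.fullmatch(PIN_REGEX, "123") is None         # too short: 4-10 digits required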
@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
-  "quality_scale": "silver",
+  "quality_scale": "platinum",
   "requirements": ["aiocomelit==0.12.3"]
 }
@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol

 import voluptuous as vol

-from homeassistant.const import CONF_DOMAIN
+from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.condition import (
     Condition,
     ConditionCheckerType,
+    ConditionConfig,
     trace_condition_function,
 )
 from homeassistant.helpers.typing import ConfigType

@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
 class DeviceCondition(Condition):
     """Device condition."""

-    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
-        """Initialize condition."""
-        self._config = config
-        self._hass = hass
+    _hass: HomeAssistant
+    _config: ConfigType
+
+    @classmethod
+    async def async_validate_complete_config(
+        cls, hass: HomeAssistant, complete_config: ConfigType
+    ) -> ConfigType:
+        """Validate complete config."""
+        complete_config = await async_validate_device_automation_config(
+            hass,
+            complete_config,
+            cv.DEVICE_CONDITION_SCHEMA,
+            DeviceAutomationType.CONDITION,
+        )
+        # Since we don't want to migrate device conditions to a new format
+        # we just pass the entire config as options.
+        complete_config[CONF_OPTIONS] = complete_config.copy()
+        return complete_config

     @classmethod
     async def async_validate_config(
         cls, hass: HomeAssistant, config: ConfigType
     ) -> ConfigType:
-        """Validate device condition config."""
-        return await async_validate_device_automation_config(
-            hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
-        )
+        """Validate config.
+
+        This is here just to satisfy the abstract class interface. It is never called.
+        """
+        raise NotImplementedError
+
+    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
+        """Initialize condition."""
+        self._hass = hass
+        assert config.options is not None
+        self._config = config.options

     async def async_get_checker(self) -> condition.ConditionCheckerType:
         """Test a device condition."""
@@ -57,6 +57,7 @@ from .manager import async_replace_device

 ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
 ERROR_INVALID_ENCRYPTION_KEY = "invalid_psk"
+ERROR_INVALID_PASSWORD_AUTH = "invalid_auth"
 _LOGGER = logging.getLogger(__name__)

 ZERO_NOISE_PSK = "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA="

@@ -137,6 +138,11 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
             self._password = ""
             return await self._async_authenticate_or_add()

+        if error == ERROR_INVALID_PASSWORD_AUTH or (
+            error is None and self._device_info and self._device_info.uses_password
+        ):
+            return await self.async_step_authenticate()
+
         if error is None and entry_data.get(CONF_NOISE_PSK):
             # Device was configured with encryption but now connects without it.
             # Check if it's the same device before offering to remove encryption.

@@ -690,13 +696,15 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
         cli = APIClient(
             host,
             port or DEFAULT_PORT,
-            "",
+            self._password or "",
             zeroconf_instance=zeroconf_instance,
             noise_psk=noise_psk,
         )
         try:
             await cli.connect()
             self._device_info = await cli.device_info()
+        except InvalidAuthAPIError:
+            return ERROR_INVALID_PASSWORD_AUTH
         except RequiresEncryptionAPIError:
             return ERROR_REQUIRES_ENCRYPTION_KEY
         except InvalidEncryptionKeyAPIError as ex:
@@ -372,6 +372,9 @@ class ESPHomeManager:
         """Subscribe to states and list entities on successful API login."""
         try:
             await self._on_connect()
+        except InvalidAuthAPIError as err:
+            _LOGGER.warning("Authentication failed for %s: %s", self.host, err)
+            await self._start_reauth_and_disconnect()
         except APIConnectionError as err:
             _LOGGER.warning(
                 "Error getting setting up connection for %s: %s", self.host, err

@@ -641,7 +644,14 @@ class ESPHomeManager:
             if self.reconnect_logic:
                 await self.reconnect_logic.stop()
             return
-        self.entry.async_start_reauth(self.hass)
-        await self.cli.disconnect()
-        if self.reconnect_logic:
-            await self.reconnect_logic.stop()
+        await self._start_reauth_and_disconnect()
+
+    async def _start_reauth_and_disconnect(self) -> None:
+        """Start reauth flow and stop reconnection attempts."""
+        self.entry.async_start_reauth(self.hass)
+        await self.cli.disconnect()
+        if self.reconnect_logic:
+            await self.reconnect_logic.stop()

     async def _handle_dynamic_encryption_key(
         self, device_info: EsphomeDeviceInfo

@@ -1063,7 +1073,7 @@ def _async_register_service(
         service_name,
         {
             "description": (
-                f"Calls the service {service.name} of the node {device_info.name}"
+                f"Performs the action {service.name} of the node {device_info.name}"
             ),
             "fields": fields,
         },
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==41.9.0",
+    "aioesphomeapi==41.11.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.3.0"
   ],
@@ -26,11 +26,14 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
         super().__init__(coordinator)
         self._serial = serial
         self._camera_name = self.data["name"]
+
+        connections = set()
+        if mac_address := self.data["mac_address"]:
+            connections.add((CONNECTION_NETWORK_MAC, mac_address))
+
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, serial)},
-            connections={
-                (CONNECTION_NETWORK_MAC, self.data["mac_address"]),
-            },
+            connections=connections,
             manufacturer=MANUFACTURER,
             model=self.data["device_sub_category"],
             name=self.data["name"],

@@ -62,11 +65,14 @@ class EzvizBaseEntity(Entity):
         self._serial = serial
         self.coordinator = coordinator
         self._camera_name = self.data["name"]
+
+        connections = set()
+        if mac_address := self.data["mac_address"]:
+            connections.add((CONNECTION_NETWORK_MAC, mac_address))
+
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, serial)},
-            connections={
-                (CONNECTION_NETWORK_MAC, self.data["mac_address"]),
-            },
+            connections=connections,
             manufacturer=MANUFACTURER,
             model=self.data["device_sub_category"],
             name=self.data["name"],
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250924.0"]
+  "requirements": ["home-assistant-frontend==20250926.0"]
 }
@@ -1,8 +1,10 @@
 load_url:
-  target:
-    device:
-      integration: fully_kiosk
   fields:
+    device_id:
+      required: true
+      selector:
+        device:
+          integration: fully_kiosk
     url:
       example: "https://home-assistant.io"
       required: true

@@ -10,10 +12,12 @@ load_url:
       text:

 set_config:
-  target:
-    device:
-      integration: fully_kiosk
   fields:
+    device_id:
+      required: true
+      selector:
+        device:
+          integration: fully_kiosk
     key:
       example: "motionSensitivity"
       required: true

@@ -26,12 +30,14 @@ set_config:
       text:

 start_application:
-  target:
-    device:
-      integration: fully_kiosk
   fields:
     application:
       example: "de.ozerov.fully"
       required: true
       selector:
         text:
+    device_id:
+      required: true
+      selector:
+        device:
+          integration: fully_kiosk
@@ -147,6 +147,10 @@
       "name": "Load URL",
       "description": "Loads a URL on Fully Kiosk Browser.",
       "fields": {
+        "device_id": {
+          "name": "Device ID",
+          "description": "The target device for this action."
+        },
         "url": {
           "name": "[%key:common::config_flow::data::url%]",
           "description": "URL to load."

@@ -157,6 +161,10 @@
       "name": "Set configuration",
       "description": "Sets a configuration parameter on Fully Kiosk Browser.",
       "fields": {
+        "device_id": {
+          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
+          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
+        },
         "key": {
           "name": "Key",
           "description": "Configuration parameter to set."

@@ -174,6 +182,10 @@
         "application": {
           "name": "Application",
           "description": "Package name of the application to start."
-        }
+        },
+        "device_id": {
+          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
+          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
+        }
       }
     }
@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):

     async def async_update(self) -> None:
         """Update an entity's state data."""
-        if "_state" in self._device.data:  # only via v3 API
-            self._last_comms = dt_util.utc_from_timestamp(
-                self._device.data["_state"]["lastComms"]
-            )
+        if (state := self._device.data.get("_state")) and (
+            last_comms := state.get("lastComms")
+        ) is not None:  # only via v3 API
+            self._last_comms = dt_util.utc_from_timestamp(last_comms)


 class GeniusZone(GeniusEntity):
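A standalone sketch of the guarded lookup adopted above: .get() plus walrus bindings short-circuit when the v3-only "_state" block or its "lastComms" key is absent, where nested indexing could raise KeyError (the sample payload is invented):

sample = {"_state": {"lastComms": 1727740800}}

if (state := sample.get("_state")) and (last_comms := state.get("lastComms")) is not None:
    print(last_comms)  # prints 1727740800; the branch is skipped when data is missing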
@@ -1,7 +1,5 @@
 set_vacation:
   target:
-    device:
-      integration: google_mail
     entity:
       integration: google_mail
   fields:
@@ -22,6 +22,6 @@
   "iot_class": "cloud_push",
   "loggers": ["aiohomeconnect"],
   "quality_scale": "platinum",
-  "requirements": ["aiohomeconnect==0.19.0"],
+  "requirements": ["aiohomeconnect==0.20.0"],
   "zeroconf": ["_homeconnect._tcp.local."]
 }
|
@@ -32,15 +32,12 @@ set_location:
|
||||
stop:
|
||||
toggle:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
turn_on:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
turn_off:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
update_entity:
|
||||
fields:
|
||||
@@ -53,8 +50,6 @@ update_entity:
|
||||
reload_custom_templates:
|
||||
reload_config_entry:
|
||||
target:
|
||||
entity: {}
|
||||
device: {}
|
||||
fields:
|
||||
entry_id:
|
||||
advanced: true
|
||||
|
@@ -3,7 +3,9 @@
|
||||
from __future__ import annotations
|
import asyncio
from email.message import Message
import logging
from typing import Any

from aioimaplib import IMAP4_SSL, AioImapException, Response
import voluptuous as vol
@@ -33,6 +35,7 @@ from .coordinator import (
    ImapPollingDataUpdateCoordinator,
    ImapPushDataUpdateCoordinator,
    connect_to_server,
    get_parts,
)
from .errors import InvalidAuth, InvalidFolder

@@ -40,6 +43,7 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]

CONF_ENTRY = "entry"
CONF_SEEN = "seen"
CONF_PART = "part"
CONF_UID = "uid"
CONF_TARGET_FOLDER = "target_folder"

@@ -64,6 +68,11 @@ SERVICE_MOVE_SCHEMA = _SERVICE_UID_SCHEMA.extend(
)
SERVICE_DELETE_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_TEXT_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_PART_SCHEMA = _SERVICE_UID_SCHEMA.extend(
    {
        vol.Required(CONF_PART): cv.string,
    }
)

type ImapConfigEntry = ConfigEntry[ImapDataUpdateCoordinator]

@@ -216,12 +225,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
                translation_placeholders={"error": str(exc)},
            ) from exc
        raise_on_error(response, "fetch_failed")
        # Index 1 of the response lines contains the bytearray with the message data
        message = ImapMessage(response.lines[1])
        await client.close()
        return {
            "text": message.text,
            "sender": message.sender,
            "subject": message.subject,
            "parts": get_parts(message.email_message),
            "uid": uid,
        }

@@ -233,6 +244,73 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        supports_response=SupportsResponse.ONLY,
    )

    async def async_fetch_part(call: ServiceCall) -> ServiceResponse:
        """Process the fetch email part service and return its content."""

        @callback
        def get_message_part(message: Message, part_key: str) -> Message:
            part: Message | Any = message
            for index in part_key.split(","):
                sub_parts = part.get_payload()
                try:
                    assert isinstance(sub_parts, list)
                    part = sub_parts[int(index)]
                except (AssertionError, ValueError, IndexError) as exc:
                    raise ServiceValidationError(
                        translation_domain=DOMAIN,
                        translation_key="invalid_part_index",
                    ) from exc

            return part

        entry_id: str = call.data[CONF_ENTRY]
        uid: str = call.data[CONF_UID]
        part_key: str = call.data[CONF_PART]
        _LOGGER.debug(
            "Fetch part %s for message %s. Entry: %s",
            part_key,
            uid,
            entry_id,
        )
        client = await async_get_imap_client(hass, entry_id)
        try:
            response = await client.fetch(uid, "BODY.PEEK[]")
        except (TimeoutError, AioImapException) as exc:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="imap_server_fail",
                translation_placeholders={"error": str(exc)},
            ) from exc
        raise_on_error(response, "fetch_failed")
        # Index 1 of the response lines contains the bytearray with the message data
        message = ImapMessage(response.lines[1])
        await client.close()
        part_data = get_message_part(message.email_message, part_key)
        part_data_content = part_data.get_payload(decode=False)
        try:
            assert isinstance(part_data_content, str)
        except AssertionError as exc:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="invalid_part_index",
            ) from exc
        return {
            "part_data": part_data_content,
            "content_type": part_data.get_content_type(),
            "content_transfer_encoding": part_data.get("Content-Transfer-Encoding"),
            "filename": part_data.get_filename(),
            "part": part_key,
            "uid": uid,
        }

    hass.services.async_register(
        DOMAIN,
        "fetch_part",
        async_fetch_part,
        SERVICE_FETCH_PART_SCHEMA,
        supports_response=SupportsResponse.ONLY,
    )

    return True
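
For context, a minimal sketch of how the new fetch_part service could be invoked from custom code (illustrative only; the entry ID and UID below are hypothetical, and the field names follow SERVICE_FETCH_PART_SCHEMA above):

response = await hass.services.async_call(
    "imap",
    "fetch_part",
    {
        "entry": "0123456789abcdef0123456789abcdef",  # hypothetical config entry ID
        "uid": "12",
        "part": "0,1",  # second sub-part of the first multipart part
    },
    blocking=True,
    return_response=True,
)
# response["part_data"] holds the still transfer-encoded payload;
# response["content_transfer_encoding"] indicates how to decode it.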

@@ -21,7 +21,7 @@ from homeassistant.const import (
    CONF_VERIFY_SSL,
    CONTENT_TYPE_TEXT_PLAIN,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import (
    ConfigEntryAuthFailed,
    ConfigEntryError,
@@ -209,6 +209,28 @@ class ImapMessage:
        return str(self.email_message.get_payload())


@callback
def get_parts(message: Message, prefix: str | None = None) -> dict[str, Any]:
    """Return information about the parts of a multipart message."""
    parts: dict[str, Any] = {}
    if not message.is_multipart():
        return {}
    for index, part in enumerate(message.get_payload(), 0):
        if TYPE_CHECKING:
            assert isinstance(part, Message)
        key = f"{prefix},{index}" if prefix else f"{index}"
        if part.is_multipart():
            parts |= get_parts(part, key)
            continue
        parts[key] = {"content_type": part.get_content_type()}
        if filename := part.get_filename():
            parts[key]["filename"] = filename
        if content_transfer_encoding := part.get("Content-Transfer-Encoding"):
            parts[key]["content_transfer_encoding"] = content_transfer_encoding

    return parts
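
A standalone illustration of how this helper flattens a nested MIME tree into the comma-separated keys the fetch_part service consumes (the message layout is invented for the example):

from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# multipart/mixed( multipart/alternative(plain, html), attachment )
inner = MIMEMultipart("alternative")
inner.attach(MIMEText("plain body", "plain"))
inner.attach(MIMEText("<b>html body</b>", "html"))
outer = MIMEMultipart("mixed")
outer.attach(inner)
outer.attach(MIMEApplication(b"binary payload", Name="report.bin"))

# get_parts(outer) yields one entry per leaf part:
# {"0,0": {"content_type": "text/plain", ...},
#  "0,1": {"content_type": "text/html", ...},
#  "1": {"content_type": "application/octet-stream", "filename": "report.bin", ...}}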

class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
    """Base class for imap client."""

@@ -275,6 +297,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
            "sender": message.sender,
            "subject": message.subject,
            "uid": last_message_uid,
            "parts": get_parts(message.email_message),
        }
        data.update({key: getattr(message, key) for key in self._event_data_keys})
        if self.custom_event_template is not None:

@@ -21,6 +21,9 @@
    },
    "fetch": {
      "service": "mdi:email-sync-outline"
    },
    "fetch_part": {
      "service": "mdi:email-sync-outline"
    }
  }
}

@@ -56,3 +56,22 @@ fetch:
      example: "12"
      selector:
        text:

fetch_part:
  fields:
    entry:
      required: true
      selector:
        config_entry:
          integration: "imap"
    uid:
      required: true
      example: "12"
      selector:
        text:
    part:
      required: true
      example: "0,1"
      selector:
        text:

@@ -84,6 +84,9 @@
    "imap_server_fail": {
      "message": "The IMAP server failed to connect: {error}."
    },
    "invalid_part_index": {
      "message": "Invalid part index."
    },
    "seen_failed": {
      "message": "Marking message as seen failed with \"{error}\"."
    }
@@ -148,6 +151,24 @@
        }
      }
    },
    "fetch_part": {
      "name": "Fetch message part",
      "description": "Fetches a message part or attachment from an email message.",
      "fields": {
        "entry": {
          "name": "[%key:component::imap::services::fetch::fields::entry::name%]",
          "description": "[%key:component::imap::services::fetch::fields::entry::description%]"
        },
        "uid": {
          "name": "[%key:component::imap::services::fetch::fields::uid::name%]",
          "description": "[%key:component::imap::services::fetch::fields::uid::description%]"
        },
        "part": {
          "name": "Part",
          "description": "The message part index."
        }
      }
    },
    "seen": {
      "name": "Mark message as seen",
      "description": "Marks an email as seen.",

@@ -142,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
    )

    coordinators = LaMarzoccoRuntimeData(
        LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
        LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
        LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
        LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
        LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),

@@ -8,7 +8,7 @@ from datetime import timedelta
import logging
from typing import Any

from pylamarzocco import LaMarzoccoMachine
from pylamarzocco import LaMarzoccoCloudClient, LaMarzoccoMachine
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful

from homeassistant.config_entries import ConfigEntry
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import DOMAIN

SCAN_INTERVAL = timedelta(seconds=15)
SCAN_INTERVAL = timedelta(seconds=60)
SETTINGS_UPDATE_INTERVAL = timedelta(hours=8)
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=30)
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=15)
@@ -51,6 +51,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
        hass: HomeAssistant,
        entry: LaMarzoccoConfigEntry,
        device: LaMarzoccoMachine,
        cloud_client: LaMarzoccoCloudClient | None = None,
    ) -> None:
        """Initialize coordinator."""
        super().__init__(
@@ -61,6 +62,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
            update_interval=self._default_update_interval,
        )
        self.device = device
        self.cloud_client = cloud_client

    async def _async_update_data(self) -> None:
        """Do the data update."""
@@ -85,11 +87,17 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
    """Class to handle fetching data from the La Marzocco API centrally."""

    cloud_client: LaMarzoccoCloudClient

    async def _internal_async_update_data(self) -> None:
        """Fetch data from API endpoint."""

        # ensure token stays valid; does nothing if token is still valid
        await self.cloud_client.async_get_access_token()

        if self.device.websocket.connected:
            return

        await self.device.get_dashboard()
        _LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())

@@ -37,5 +37,5 @@
  "iot_class": "cloud_push",
  "loggers": ["pylamarzocco"],
  "quality_scale": "platinum",
  "requirements": ["pylamarzocco==2.1.0"]
  "requirements": ["pylamarzocco==2.1.1"]
}

@@ -12,7 +12,7 @@ from homeassistant.components.number import (
    NumberEntityDescription,
    NumberMode,
)
from homeassistant.const import PRECISION_WHOLE, EntityCategory
from homeassistant.const import PRECISION_WHOLE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -72,6 +72,7 @@ NUMBERS: tuple[LetPotNumberEntityDescription, ...] = (
    LetPotNumberEntityDescription(
        key="plant_days",
        translation_key="plant_days",
        native_unit_of_measurement=UnitOfTime.DAYS,
        value_fn=lambda coordinator: coordinator.data.plant_days,
        set_value_fn=(
            lambda device_client, serial, value: device_client.set_plant_days(

@@ -54,8 +54,7 @@
      "name": "Light brightness"
    },
    "plant_days": {
      "name": "Plants age",
      "unit_of_measurement": "days"
      "name": "Plants age"
    }
  },
  "select": {

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/libre_hardware_monitor",
  "iot_class": "local_polling",
  "quality_scale": "silver",
  "requirements": ["librehardwaremonitor-api==1.3.1"]
  "requirements": ["librehardwaremonitor-api==1.4.0"]
}

@@ -28,7 +28,7 @@ rules:
  docs-configuration-parameters:
    status: done
    comment: No options to configure
  docs-installation-parameters: todo
  docs-installation-parameters: done
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo

@@ -1,7 +1,5 @@
set_hold_time:
  target:
    device:
      integration: lyric
    entity:
      integration: lyric
      domain: climate

@@ -7,8 +7,9 @@ from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionE
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_PORT, CONF_VERIFY_SSL
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.hassio import HassioServiceInfo

from .const import DOMAIN, LOGGER, MIN_REQUIRED_MEALIE_VERSION
from .utils import create_version
@@ -25,13 +26,21 @@ REAUTH_SCHEMA = vol.Schema(
        vol.Required(CONF_API_TOKEN): str,
    }
)
DISCOVERY_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_API_TOKEN): str,
    }
)


class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
    """Mealie config flow."""

    VERSION = 1

    host: str | None = None
    verify_ssl: bool = True
    _hassio_discovery: dict[str, Any] | None = None

    async def check_connection(
        self, api_token: str
@@ -143,3 +152,59 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
            data_schema=USER_SCHEMA,
            errors=errors,
        )

    async def async_step_hassio(
        self, discovery_info: HassioServiceInfo
    ) -> ConfigFlowResult:
        """Prepare configuration for a Mealie add-on.

        This flow is triggered by the discovery component.
        """
        await self._async_handle_discovery_without_unique_id()

        self._hassio_discovery = discovery_info.config

        return await self.async_step_hassio_confirm()

    async def async_step_hassio_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm Supervisor discovery and prompt for API token."""
        if user_input is None:
            return await self._show_hassio_form()

        assert self._hassio_discovery

        self.host = (
            f"{self._hassio_discovery[CONF_HOST]}:{self._hassio_discovery[CONF_PORT]}"
        )
        self.verify_ssl = True

        errors, user_id = await self.check_connection(
            user_input[CONF_API_TOKEN],
        )

        if not errors:
            await self.async_set_unique_id(user_id)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(
                title="Mealie",
                data={
                    CONF_HOST: self.host,
                    CONF_API_TOKEN: user_input[CONF_API_TOKEN],
                    CONF_VERIFY_SSL: self.verify_ssl,
                },
            )
        return await self._show_hassio_form(errors)

    async def _show_hassio_form(
        self, errors: dict[str, str] | None = None
    ) -> ConfigFlowResult:
        """Show the Hass.io confirmation form to the user."""
        assert self._hassio_discovery
        return self.async_show_form(
            step_id="hassio_confirm",
            data_schema=DISCOVERY_SCHEMA,
            description_placeholders={"addon": self._hassio_discovery["addon"]},
            errors=errors or {},
        )

@@ -39,8 +39,14 @@ rules:
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info: todo
  discovery: todo
  discovery-update-info:
    status: exempt
    comment: |
      This integration will only discover a Mealie addon that is local, not on the network.
  discovery:
    status: done
    comment: |
      The integration will discover a Mealie addon posting a discovery message.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: todo

@@ -39,6 +39,16 @@
        "api_token": "[%key:component::mealie::common::data_description_api_token%]",
        "verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]"
      }
    },
    "hassio_confirm": {
      "title": "Mealie via Home Assistant add-on",
      "description": "Do you want to configure Home Assistant to connect to the Mealie instance provided by the add-on: {addon}?",
      "data": {
        "api_token": "[%key:common::config_flow::data::api_token%]"
      },
      "data_description": {
        "api_token": "[%key:component::mealie::common::data_description_api_token%]"
      }
    }
  },
  "error": {
@@ -50,6 +60,7 @@
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
      "wrong_account": "You have to use the same account that was used to configure the integration."

@@ -1,8 +1,7 @@
set_text_overlay:
  target:
    device:
      integration: motioneye
    entity:
      domain: camera
      integration: motioneye
  fields:
    left_text:
@@ -48,9 +47,8 @@ set_text_overlay:

action:
  target:
    device:
      integration: motioneye
    entity:
      domain: camera
      integration: motioneye
  fields:
    action:
@@ -88,7 +86,6 @@ action:

snapshot:
  target:
    device:
      integration: motioneye
    entity:
      domain: camera
      integration: motioneye

@@ -38,10 +38,7 @@ from homeassistant.core import (
    get_hassjob_callable_job_type,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect,
    async_dispatcher_send,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.importlib import async_import_module
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.typing import ConfigType
@@ -74,7 +71,6 @@ from .const import (
    DEFAULT_WS_PATH,
    DOMAIN,
    MQTT_CONNECTION_STATE,
    MQTT_PROCESSED_SUBSCRIPTIONS,
    PROTOCOL_5,
    PROTOCOL_31,
    TRANSPORT_WEBSOCKETS,
@@ -113,7 +109,6 @@ INITIAL_SUBSCRIBE_COOLDOWN = 0.5
SUBSCRIBE_COOLDOWN = 0.1
UNSUBSCRIBE_COOLDOWN = 0.1
TIMEOUT_ACK = 10
SUBSCRIBE_TIMEOUT = 10
RECONNECT_INTERVAL_SECONDS = 10

MAX_WILDCARD_SUBSCRIBES_PER_CALL = 1
@@ -196,47 +191,11 @@ async def async_subscribe(
    msg_callback: Callable[[ReceiveMessage], Coroutine[Any, Any, None] | None],
    qos: int = DEFAULT_QOS,
    encoding: str | None = DEFAULT_ENCODING,
    wait: bool = False,
) -> CALLBACK_TYPE:
    """Subscribe to an MQTT topic.

    Call the return value to unsubscribe.
    """
    subscription_complete: asyncio.Future[None]

    async def _sync_mqtt_subscribe(subscriptions: list[tuple[str, int]]) -> None:
        if (topic, qos) not in subscriptions:
            return
        subscription_complete.set_result(None)

    def _async_timeout_subscribe() -> None:
        if not subscription_complete.done():
            subscription_complete.set_exception(TimeoutError)

    if (
        wait
        and DATA_MQTT in hass.data
        and not hass.data[DATA_MQTT].client._matching_subscriptions(topic)  # noqa: SLF001
    ):
        subscription_complete = hass.loop.create_future()
        dispatcher = async_dispatcher_connect(
            hass, MQTT_PROCESSED_SUBSCRIPTIONS, _sync_mqtt_subscribe
        )
        subscribe_callback = async_subscribe_internal(
            hass, topic, msg_callback, qos, encoding
        )
        try:
            hass.loop.call_later(SUBSCRIBE_TIMEOUT, _async_timeout_subscribe)
            await subscription_complete
        except TimeoutError as exc:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="subscribe_timeout",
            ) from exc
        finally:
            dispatcher()
        return subscribe_callback

    return async_subscribe_internal(hass, topic, msg_callback, qos, encoding)


@@ -1004,7 +963,6 @@ class MQTT:
        self._last_subscribe = time.monotonic()

        await self._async_wait_for_mid_or_raise(mid, result)
        async_dispatcher_send(self.hass, MQTT_PROCESSED_SUBSCRIPTIONS, chunk_list)

    async def _async_perform_unsubscribes(self) -> None:
        """Perform pending MQTT client unsubscribes."""

@@ -370,7 +370,6 @@ DOMAIN = "mqtt"
LOGGER = logging.getLogger(__package__)

MQTT_CONNECTION_STATE = "mqtt_connection_state"
MQTT_PROCESSED_SUBSCRIPTIONS = "mqtt_processed_subscriptions"

PAYLOAD_EMPTY_JSON = "{}"
PAYLOAD_NONE = "None"

@@ -2,10 +2,8 @@
  "domain": "mvglive",
  "name": "MVG",
  "codeowners": [],
  "disabled": "This integration is disabled because it uses non-open source code to operate.",
  "documentation": "https://www.home-assistant.io/integrations/mvglive",
  "iot_class": "cloud_polling",
  "loggers": ["MVGLive"],
  "quality_scale": "legacy",
  "requirements": ["PyMVGLive==1.1.4"]
  "loggers": ["MVG"],
  "requirements": ["mvg==1.4.0"]
}

@@ -1,13 +1,14 @@
"""Support for departure information for public transport in Munich."""

# mypy: ignore-errors
from __future__ import annotations

from collections.abc import Mapping
from copy import deepcopy
from datetime import timedelta
import logging
from typing import Any

import MVGLive
from mvg import MvgApi, MvgApiError, TransportType
import voluptuous as vol

from homeassistant.components.sensor import (
@@ -19,6 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util

_LOGGER = logging.getLogger(__name__)

@@ -44,53 +46,55 @@ ICONS = {
    "SEV": "mdi:checkbox-blank-circle-outline",
    "-": "mdi:clock",
}
ATTRIBUTION = "Data provided by MVG-live.de"

ATTRIBUTION = "Data provided by mvg.de"

SCAN_INTERVAL = timedelta(seconds=30)

PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_NEXT_DEPARTURE): [
            {
                vol.Required(CONF_STATION): cv.string,
                vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
                vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
                vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
                vol.Optional(
                    CONF_PRODUCTS, default=DEFAULT_PRODUCT
                ): cv.ensure_list_csv,
                vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
                vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
                vol.Optional(CONF_NAME): cv.string,
            }
        ]
    }
PLATFORM_SCHEMA = vol.All(
    cv.deprecated(CONF_DIRECTIONS),
    SENSOR_PLATFORM_SCHEMA.extend(
        {
            vol.Required(CONF_NEXT_DEPARTURE): [
                {
                    vol.Required(CONF_STATION): cv.string,
                    vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
                    vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
                    vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
                    vol.Optional(
                        CONF_PRODUCTS, default=DEFAULT_PRODUCT
                    ): cv.ensure_list_csv,
                    vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
                    vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
                    vol.Optional(CONF_NAME): cv.string,
                }
            ]
        }
    ),
)


def setup_platform(
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the MVGLive sensor."""
    add_entities(
        (
            MVGLiveSensor(
                nextdeparture.get(CONF_STATION),
                nextdeparture.get(CONF_DESTINATIONS),
                nextdeparture.get(CONF_DIRECTIONS),
                nextdeparture.get(CONF_LINES),
                nextdeparture.get(CONF_PRODUCTS),
                nextdeparture.get(CONF_TIMEOFFSET),
                nextdeparture.get(CONF_NUMBER),
                nextdeparture.get(CONF_NAME),
            )
            for nextdeparture in config[CONF_NEXT_DEPARTURE]
        ),
        True,
    )
    sensors = [
        MVGLiveSensor(
            hass,
            nextdeparture.get(CONF_STATION),
            nextdeparture.get(CONF_DESTINATIONS),
            nextdeparture.get(CONF_LINES),
            nextdeparture.get(CONF_PRODUCTS),
            nextdeparture.get(CONF_TIMEOFFSET),
            nextdeparture.get(CONF_NUMBER),
            nextdeparture.get(CONF_NAME),
        )
        for nextdeparture in config[CONF_NEXT_DEPARTURE]
    ]
    add_entities(sensors, True)


class MVGLiveSensor(SensorEntity):
@@ -100,38 +104,38 @@ class MVGLiveSensor(SensorEntity):

    def __init__(
        self,
        station,
        hass: HomeAssistant,
        station_name,
        destinations,
        directions,
        lines,
        products,
        timeoffset,
        number,
        name,
    ):
    ) -> None:
        """Initialize the sensor."""
        self._station = station
        self._name = name
        self._station_name = station_name
        self.data = MVGLiveData(
            station, destinations, directions, lines, products, timeoffset, number
            hass, station_name, destinations, lines, products, timeoffset, number
        )
        self._state = None
        self._icon = ICONS["-"]

    @property
    def name(self):
    def name(self) -> str | None:
        """Return the name of the sensor."""
        if self._name:
            return self._name
        return self._station
        return self._station_name

    @property
    def native_value(self):
    def native_value(self) -> str | None:
        """Return the next departure time."""
        return self._state

    @property
    def extra_state_attributes(self):
    def extra_state_attributes(self) -> Mapping[str, Any] | None:
        """Return the state attributes."""
        if not (dep := self.data.departures):
            return None
@@ -140,88 +144,114 @@ class MVGLiveSensor(SensorEntity):
        return attr

    @property
    def icon(self):
    def icon(self) -> str | None:
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def native_unit_of_measurement(self):
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit this state is expressed in."""
        return UnitOfTime.MINUTES

    def update(self) -> None:
    async def async_update(self) -> None:
        """Get the latest data and update the state."""
        self.data.update()
        await self.data.update()
        if not self.data.departures:
            self._state = "-"
            self._state = None
            self._icon = ICONS["-"]
        else:
            self._state = self.data.departures[0].get("time", "-")
            self._icon = ICONS[self.data.departures[0].get("product", "-")]
            self._state = self.data.departures[0].get("time_in_mins", "-")
            self._icon = self.data.departures[0].get("icon", ICONS["-"])


def _get_minutes_until_departure(departure_time: int) -> int:
    """Calculate the time difference in minutes between the current time and a given departure time.

    Args:
        departure_time: Unix timestamp of the departure time, in seconds.

    Returns:
        The time difference in minutes, as an integer.

    """
    current_time = dt_util.utcnow()
    departure_datetime = dt_util.utc_from_timestamp(departure_time)
    time_difference = (departure_datetime - current_time).total_seconds()
    return int(time_difference / 60.0)


class MVGLiveData:
    """Pull data from the mvg-live.de web page."""
    """Pull data from the mvg.de API."""

    def __init__(
        self, station, destinations, directions, lines, products, timeoffset, number
    ):
        self,
        hass: HomeAssistant,
        station_name,
        destinations,
        lines,
        products,
        timeoffset,
        number,
    ) -> None:
        """Initialize the sensor."""
        self._station = station
        self._hass = hass
        self._station_name = station_name
        self._station_id = None
        self._destinations = destinations
        self._directions = directions
        self._lines = lines
        self._products = products
        self._timeoffset = timeoffset
        self._number = number
        self._include_ubahn = "U-Bahn" in self._products
        self._include_tram = "Tram" in self._products
        self._include_bus = "Bus" in self._products
        self._include_sbahn = "S-Bahn" in self._products
        self.mvg = MVGLive.MVGLive()
        self.departures = []
        self.departures: list[dict[str, Any]] = []

    def update(self):
    async def update(self):
        """Update the connection data."""
        if self._station_id is None:
            try:
                station = await MvgApi.station_async(self._station_name)
                self._station_id = station["id"]
            except MvgApiError as err:
                _LOGGER.error(
                    "Failed to resolve station %s: %s", self._station_name, err
                )
                self.departures = []
                return

        try:
            _departures = self.mvg.getlivedata(
                station=self._station,
                timeoffset=self._timeoffset,
                ubahn=self._include_ubahn,
                tram=self._include_tram,
                bus=self._include_bus,
                sbahn=self._include_sbahn,
            _departures = await MvgApi.departures_async(
                station_id=self._station_id,
                offset=self._timeoffset,
                limit=self._number,
                transport_types=[
                    transport_type
                    for transport_type in TransportType
                    if transport_type.value[0] in self._products
                ]
                if self._products
                else None,
            )
        except ValueError:
            self.departures = []
            _LOGGER.warning("Returned data not understood")
            return
        self.departures = []
        for i, _departure in enumerate(_departures):
            # find the first departure meeting the criteria
        for _departure in _departures:
            if (
                "" not in self._destinations[:1]
                and _departure["destination"] not in self._destinations
            ):
                continue

            if (
                "" not in self._directions[:1]
                and _departure["direction"] not in self._directions
            ):
            if "" not in self._lines[:1] and _departure["line"] not in self._lines:
                continue

            if "" not in self._lines[:1] and _departure["linename"] not in self._lines:
            time_to_departure = _get_minutes_until_departure(_departure["time"])

            if time_to_departure < self._timeoffset:
                continue

            if _departure["time"] < self._timeoffset:
                continue

            # now select the relevant data
            _nextdep = {}
            for k in ("destination", "linename", "time", "direction", "product"):
            for k in ("destination", "line", "type", "cancelled", "icon"):
                _nextdep[k] = _departure.get(k, "")
            _nextdep["time"] = int(_nextdep["time"])
            _nextdep["time_in_mins"] = time_to_departure
            self.departures.append(_nextdep)
            if i == self._number - 1:
                break
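
A worked example of the minute arithmetic in _get_minutes_until_departure above (standalone, with a fixed "now" substituted for dt_util.utcnow()):

from datetime import datetime, timezone

def minutes_until(departure_ts: int, now: datetime) -> int:
    # same truncating conversion as the helper above
    departure = datetime.fromtimestamp(departure_ts, tz=timezone.utc)
    return int((departure - now).total_seconds() / 60.0)

now = datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)
print(minutes_until(int(now.timestamp()) + 450, now))  # 7 -- 450 s is 7.5 min, truncated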

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
  "iot_class": "local_polling",
  "requirements": ["nibe==2.18.0"]
  "requirements": ["nibe==2.19.0"]
}

@@ -11,7 +11,7 @@
        "_r_to_u": "City/county (R-U)",
        "_v_to_z": "City/county (V-Z)",
        "slots": "Maximum warnings per city/county",
        "headline_filter": "Blacklist regex to filter warning headlines"
        "headline_filter": "Headline blocklist"
      }
    }
  },
@@ -34,7 +34,7 @@
        "_v_to_z": "[%key:component::nina::config::step::user::data::_v_to_z%]",
        "slots": "[%key:component::nina::config::step::user::data::slots%]",
        "headline_filter": "[%key:component::nina::config::step::user::data::headline_filter%]",
        "area_filter": "Whitelist regex to filter warnings based on affected areas"
        "area_filter": "Affected area filter"
      }
    }
  },

@@ -13,6 +13,6 @@ NMAP_TRACKED_DEVICES: Final = "nmap_tracked_devices"
# Interval in minutes to exclude devices from a scan while they are home
CONF_HOME_INTERVAL: Final = "home_interval"
CONF_OPTIONS: Final = "scan_options"
DEFAULT_OPTIONS: Final = "-F -T4 --min-rate 10 --host-timeout 5s"
DEFAULT_OPTIONS: Final = "-n -sn -PR -T4 --min-rate 10 --host-timeout 5s"

TRACKER_SCAN_INTERVAL: Final = 120

@@ -5,7 +5,14 @@ from __future__ import annotations
from pyportainer import Portainer

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_HOST, Platform
from homeassistant.const import (
    CONF_API_KEY,
    CONF_API_TOKEN,
    CONF_HOST,
    CONF_URL,
    CONF_VERIFY_SSL,
    Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession

@@ -19,11 +26,12 @@ type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
    """Set up Portainer from a config entry."""

    session = async_create_clientsession(hass)
    client = Portainer(
        api_url=entry.data[CONF_HOST],
        api_key=entry.data[CONF_API_KEY],
        session=session,
        api_url=entry.data[CONF_URL],
        api_key=entry.data[CONF_API_TOKEN],
        session=async_create_clientsession(
            hass=hass, verify_ssl=entry.data[CONF_VERIFY_SSL]
        ),
    )

    coordinator = PortainerCoordinator(hass, entry, client)
@@ -38,3 +46,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
    """Migrate old entry."""

    if entry.version < 2:
        data = dict(entry.data)
        data[CONF_URL] = data.pop(CONF_HOST)
        data[CONF_API_TOKEN] = data.pop(CONF_API_KEY)
        hass.config_entries.async_update_entry(entry=entry, data=data, version=2)

    return True
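
A plain-dict illustration of what the version 1 to 2 migration above does to stored entry data (the values are invented; only the keys are renamed):

v1_data = {"host": "http://portainer.local:9000", "api_key": "ptr_xxx"}

v2_data = dict(v1_data)
v2_data["url"] = v2_data.pop("host")
v2_data["api_token"] = v2_data.pop("api_key")

assert v2_data == {"url": "http://portainer.local:9000", "api_token": "ptr_xxx"}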

@@ -131,7 +131,15 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
        # Container IDs are ephemeral, so use the container name for the unique ID.
        # The first name should always be unique; it's fine if users have aliases.
        # According to Docker's API docs, the first name is unique.
        device_identifier = (
            self._device_info.names[0].replace("/", " ").strip()
            if self._device_info.names
            else None
        )
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"

    @property
    def available(self) -> bool:

@@ -14,7 +14,7 @@ from pyportainer import (
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_HOST
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -24,8 +24,9 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_API_KEY): str,
        vol.Required(CONF_URL): str,
        vol.Required(CONF_API_TOKEN): str,
        vol.Optional(CONF_VERIFY_SSL, default=True): bool,
    }
)

@@ -34,9 +35,11 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    """Validate the user input allows us to connect."""

    client = Portainer(
        api_url=data[CONF_HOST],
        api_key=data[CONF_API_KEY],
        session=async_get_clientsession(hass),
        api_url=data[CONF_URL],
        api_key=data[CONF_API_TOKEN],
        session=async_get_clientsession(
            hass=hass, verify_ssl=data.get(CONF_VERIFY_SSL, True)
        ),
    )
    try:
        await client.get_endpoints()
@@ -47,19 +50,21 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    except PortainerTimeoutError as err:
        raise PortainerTimeout from err

    _LOGGER.debug("Connected to Portainer API: %s", data[CONF_HOST])
    _LOGGER.debug("Connected to Portainer API: %s", data[CONF_URL])


class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Portainer."""

    VERSION = 2

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
            self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
            try:
                await _validate_input(self.hass, user_input)
            except CannotConnect:
@@ -72,10 +77,10 @@ class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                await self.async_set_unique_id(user_input[CONF_API_KEY])
                await self.async_set_unique_id(user_input[CONF_API_TOKEN])
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=user_input[CONF_HOST], data=user_input
                    title=user_input[CONF_URL], data=user_input
                )

        return self.async_show_form(

@@ -16,7 +16,7 @@ from pyportainer.models.docker import DockerContainer
from pyportainer.models.portainer import Endpoint

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -87,7 +87,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
    async def _async_update_data(self) -> dict[int, PortainerCoordinatorData]:
        """Fetch data from Portainer API."""
        _LOGGER.debug(
            "Fetching data from Portainer API: %s", self.config_entry.data[CONF_HOST]
            "Fetching data from Portainer API: %s", self.config_entry.data[CONF_URL]
        )

        try:

@@ -60,7 +60,7 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):

        self._attr_device_info = DeviceInfo(
            identifiers={
                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{self.device_id}")
                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
            },
            manufacturer=DEFAULT_NAME,
            model="Container",

@@ -3,14 +3,16 @@
    "step": {
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "api_key": "[%key:common::config_flow::data::api_key%]"
          "url": "[%key:common::config_flow::data::url%]",
          "api_token": "[%key:common::config_flow::data::api_token%]",
          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
        },
        "data_description": {
          "host": "The host/URL, including the port, of your Portainer instance",
          "api_key": "The API key for authenticating with Portainer"
          "url": "The URL, including the port, of your Portainer instance",
          "api_token": "The API access token for authenticating with Portainer",
          "verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
        },
        "description": "You can create an API key in the Portainer UI. Go to **My account > API keys** and select **Add API key**"
        "description": "You can create an access token in the Portainer UI. Go to **My account > Access tokens** and select **Add access token**"
      }
    },
    "error": {

@@ -351,13 +351,9 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
    def _set_current_map(self) -> None:
        if (
            self.roborock_device_info.props.status is not None
            and self.roborock_device_info.props.status.map_status is not None
            and self.roborock_device_info.props.status.current_map is not None
        ):
            # The map status represents the map flag as flag * 4 + 3 -
            # so we have to invert that in order to get the map flag that we can use to set the current map.
            self.current_map = (
                self.roborock_device_info.props.status.map_status - 3
            ) // 4
            self.current_map = self.roborock_device_info.props.status.current_map
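
The removed lines undid the map_status encoding described in the comment: map_status encodes the map flag as flag * 4 + 3, so the inversion (map_status - 3) // 4 recovers the flag. A quick check of that arithmetic:

for flag in range(4):
    map_status = flag * 4 + 3  # e.g. flag 2 -> map_status 11
    assert (map_status - 3) // 4 == flag

Reading status.current_map directly, as the new line does, drops the need for this inversion.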

    async def set_current_map_rooms(self) -> None:
        """Fetch all of the rooms for the current map and set on RoborockMapInfo."""
@@ -440,7 +436,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        # If either of these fail, we don't care, and we want to continue.
        await asyncio.gather(*tasks, return_exceptions=True)

        if len(self.maps) != 1:
        if len(self.maps) > 1:
            # Set the map back to the map the user previously had selected so that it
            # does not change the end user's app.
            # Only needs to happen when we changed maps above.

@@ -7,6 +7,6 @@
  "iot_class": "local_push",
  "loggers": ["aiorussound"],
  "quality_scale": "silver",
  "requirements": ["aiorussound==4.8.1"],
  "requirements": ["aiorussound==4.8.2"],
  "zeroconf": ["_rio._tcp.local."]
}

@@ -30,5 +30,5 @@
  "iot_class": "cloud_push",
  "loggers": ["pysmartthings"],
  "quality_scale": "bronze",
  "requirements": ["pysmartthings==3.2.9"]
  "requirements": ["pysmartthings==3.3.0"]
}

@@ -610,7 +610,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):

    def _play_media_queue(
        self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue
    ):
    ) -> None:
        """Manage adding, replacing, playing items onto the sonos queue."""
        _LOGGER.debug(
            "_play_media_queue item_id [%s] title [%s] enqueue [%s]",
@@ -639,7 +639,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
        media_type: MediaType | str,
        media_id: str,
        enqueue: MediaPlayerEnqueue,
    ):
    ) -> None:
        """Play a directory from a music library share."""
        item = media_browser.get_media(self.media.library, media_id, media_type)
        if not item:
@@ -660,6 +660,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
        enqueue: MediaPlayerEnqueue,
        title: str,
    ) -> None:
        """Play a sharelink."""
        share_link = self.coordinator.share_link
        kwargs = {}
        if title:

@@ -24,8 +24,9 @@ restore:

set_sleep_timer:
  target:
    device:
    entity:
      integration: sonos
      domain: media_player
  fields:
    sleep_time:
      selector:
@@ -36,13 +37,15 @@ set_sleep_timer:

clear_sleep_timer:
  target:
    device:
    entity:
      integration: sonos
      domain: media_player

play_queue:
  target:
    device:
    entity:
      integration: sonos
      domain: media_player
  fields:
    queue_position:
      selector:
@@ -53,8 +56,9 @@ play_queue:

remove_from_queue:
  target:
    device:
    entity:
      integration: sonos
      domain: media_player
  fields:
    queue_position:
      selector:
@@ -71,8 +75,9 @@ get_queue:

update_alarm:
  target:
    device:
    entity:
      integration: sonos
      domain: media_player
  fields:
    alarm_id:
      required: true

@@ -1,5 +1,6 @@
"""The Squeezebox integration."""

import asyncio
from asyncio import timeout
from dataclasses import dataclass, field
from datetime import datetime
@@ -31,11 +32,11 @@ from homeassistant.helpers.device_registry import (
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later
from homeassistant.util.hass_dict import HassKey

from .const import (
    CONF_HTTPS,
    DISCOVERY_INTERVAL,
    DISCOVERY_TASK,
    DOMAIN,
    SERVER_MANUFACTURER,
    SERVER_MODEL,
@@ -64,6 +65,8 @@ PLATFORMS = [
    Platform.UPDATE,
]

SQUEEZEBOX_HASS_DATA: HassKey[asyncio.Task] = HassKey(DOMAIN)


@dataclass
class SqueezeboxData:
@@ -240,7 +243,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry)
    current_entries = hass.config_entries.async_entries(DOMAIN)
    if len(current_entries) == 1 and current_entries[0] == entry:
        _LOGGER.debug("Stopping server discovery task")
        hass.data[DOMAIN][DISCOVERY_TASK].cancel()
        hass.data[DOMAIN].pop(DISCOVERY_TASK)
        hass.data[SQUEEZEBOX_HASS_DATA].cancel()
        hass.data.pop(SQUEEZEBOX_HASS_DATA)

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
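
The HassKey change above replaces stringly-typed hass.data access with a typed key; a minimal sketch of the pattern (the storage lines are shown as comments because hass only exists inside a running Home Assistant):

import asyncio
from homeassistant.util.hass_dict import HassKey

# HassKey carries the value type, so lookups type-check without casts.
MY_KEY: HassKey[asyncio.Task] = HassKey("squeezebox")

# hass.data[MY_KEY] = hass.async_create_background_task(...)  # stored once
# task: asyncio.Task = hass.data[MY_KEY]                      # typed retrieval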

@@ -5,7 +5,7 @@ from __future__ import annotations
import contextlib
from dataclasses import dataclass, field
import logging
from typing import Any
from typing import TYPE_CHECKING, Any, cast

from pysqueezebox import Player

@@ -14,7 +14,6 @@ from homeassistant.components.media_player import (
    BrowseError,
    BrowseMedia,
    MediaClass,
    MediaPlayerEntity,
    MediaType,
)
from homeassistant.core import HomeAssistant
@@ -22,6 +21,9 @@ from homeassistant.helpers.network import is_internal_request

from .const import DOMAIN, UNPLAYABLE_TYPES

if TYPE_CHECKING:
    from .media_player import SqueezeBoxMediaPlayerEntity

_LOGGER = logging.getLogger(__name__)

LIBRARY = [
@@ -244,14 +246,13 @@ def _build_response_favorites(item: dict[str, Any]) -> BrowseMedia:
def _get_item_thumbnail(
    item: dict[str, Any],
    player: Player,
    entity: MediaPlayerEntity,
    entity: SqueezeBoxMediaPlayerEntity,
    item_type: str | MediaType | None,
    search_type: str,
    internal_request: bool,
    known_apps_radios: set[str],
) -> str | None:
    """Construct path to thumbnail image."""
    item_thumbnail: str | None = None

    track_id = item.get("artwork_track_id") or (
        item.get("id")
@@ -262,21 +263,27 @@ def _get_item_thumbnail(

    if track_id:
        if internal_request:
            item_thumbnail = player.generate_image_url_from_track_id(track_id)
        elif item_type is not None:
            item_thumbnail = entity.get_browse_image_url(
                item_type, item["id"], track_id
            )
            return cast(str, player.generate_image_url_from_track_id(track_id))
        if item_type is not None:
            return entity.get_browse_image_url(item_type, item["id"], track_id)

    elif search_type in ["apps", "radios"]:
        item_thumbnail = player.generate_image_url(item["icon"])
    if item_thumbnail is None:
        item_thumbnail = item.get("image_url")  # will not be proxied by HA
    return item_thumbnail
    url = None
    content_type = item_type or "unknown"

    if search_type in ["apps", "radios"]:
        url = cast(str, player.generate_image_url(item["icon"]))
    elif image_url := item.get("image_url"):
        url = image_url

    if internal_request or not url:
        return url

    synthetic_id = entity.get_synthetic_id_and_cache_url(url)
    return entity.get_browse_image_url(content_type, "synthetic", synthetic_id)


async def build_item_response(
    entity: MediaPlayerEntity,
    entity: SqueezeBoxMediaPlayerEntity,
    player: Player,
    payload: dict[str, str | None],
    browse_limit: int,

@@ -1,7 +1,6 @@
"""Constants for the Squeezebox component."""

CONF_HTTPS = "https"
DISCOVERY_TASK = "discovery_task"
DOMAIN = "squeezebox"
DEFAULT_PORT = 9000
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"

@@ -8,6 +8,7 @@ import json
import logging
from typing import TYPE_CHECKING, Any, cast

from lru import LRU
from pysqueezebox import Server, async_discover
import voluptuous as vol

@@ -43,7 +44,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.start import async_at_start
from homeassistant.util.dt import utcnow
from homeassistant.util.ulid import ulid_now

from . import SQUEEZEBOX_HASS_DATA
from .browse_media import (
    BrowseData,
    build_item_response,
@@ -58,7 +61,6 @@ from .const import (
    CONF_VOLUME_STEP,
    DEFAULT_BROWSE_LIMIT,
    DEFAULT_VOLUME_STEP,
    DISCOVERY_TASK,
    DOMAIN,
    SERVER_MANUFACTURER,
    SERVER_MODEL,
@@ -110,12 +112,10 @@ async def start_server_discovery(hass: HomeAssistant) -> None:
        },
    )

    hass.data.setdefault(DOMAIN, {})
    if DISCOVERY_TASK not in hass.data[DOMAIN]:
    if not hass.data.get(SQUEEZEBOX_HASS_DATA):
        _LOGGER.debug("Adding server discovery task for squeezebox")
        hass.data[DOMAIN][DISCOVERY_TASK] = hass.async_create_background_task(
            async_discover(_discovered_server),
            name="squeezebox server discovery",
        hass.data[SQUEEZEBOX_HASS_DATA] = hass.async_create_background_task(
            async_discover(_discovered_server), name="squeezebox server discovery"
        )


@@ -262,6 +262,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
        self._previous_media_position = 0
        self._attr_unique_id = format_mac(self._player.player_id)
        self._browse_data = BrowseData()
        self._synthetic_media_browser_thumbnail_items: LRU[str, str] = LRU(5000)

    @callback
    def _handle_coordinator_update(self) -> None:
@@ -744,6 +745,17 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
        await self._player.async_unsync()
        await self.coordinator.async_refresh()

    def get_synthetic_id_and_cache_url(self, url: str) -> str:
        """Cache a thumbnail URL and return a synthetic ID.

        This enables us to proxy thumbnails for apps and favorites, as those do not have IDs.
        """
        synthetic_id = f"s_{ulid_now()}"

        self._synthetic_media_browser_thumbnail_items[synthetic_id] = url

        return synthetic_id
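
A stdlib stand-in for the bounded cache behind this helper (the real code uses LRU(5000) from the lru package and a ULID for the ID; the class below is illustrative only):

from collections import OrderedDict

class SyntheticThumbs:
    """Remember external artwork URLs under short synthetic IDs."""

    def __init__(self, maxsize: int = 5000) -> None:
        self._items: OrderedDict[str, str] = OrderedDict()
        self._maxsize = maxsize
        self._counter = 0

    def add(self, url: str) -> str:
        self._counter += 1
        synthetic_id = f"s_{self._counter}"  # the integration uses a ULID here
        self._items[synthetic_id] = url
        if len(self._items) > self._maxsize:
            self._items.popitem(last=False)  # evict the oldest entry
        return synthetic_id

    def get(self, synthetic_id: str) -> str | None:
        return self._items.get(synthetic_id)

cache = SyntheticThumbs()
sid = cache.add("http://radio.example/icon.png")
assert cache.get(sid) == "http://radio.example/icon.png"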

    async def async_browse_media(
        self,
        media_content_type: MediaType | str | None = None,
@@ -787,11 +799,21 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
        media_image_id: str | None = None,
    ) -> tuple[bytes | None, str | None]:
        """Get album art from Squeezebox server."""
        if media_image_id:
            image_url = self._player.generate_image_url_from_track_id(media_image_id)
            result = await self._async_fetch_image(image_url)
            if result == (None, None):
                _LOGGER.debug("Error retrieving proxied album art from %s", image_url)
            return result
        if not media_image_id:
            return (None, None)

        return (None, None)
        if media_content_id == "synthetic":
            image_url = self._synthetic_media_browser_thumbnail_items.get(
                media_image_id
            )

            if image_url is None:
                _LOGGER.debug("Synthetic ID %s not found in cache", media_image_id)
                return (None, None)
        else:
            image_url = self._player.generate_image_url_from_track_id(media_image_id)

        result = await self._async_fetch_image(image_url)
        if result == (None, None):
            _LOGGER.debug("Error retrieving proxied album art from %s", image_url)
        return result

@@ -3,16 +3,18 @@
from __future__ import annotations

from datetime import datetime, timedelta
from typing import cast
from typing import Any, cast

import voluptuous as vol

from homeassistant.const import CONF_CONDITION, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.const import CONF_OPTIONS, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.condition import (
    Condition,
    ConditionCheckerType,
    ConditionConfig,
    condition_trace_set_result,
    condition_trace_update_result,
    trace_condition_function,
@@ -21,20 +23,22 @@ from homeassistant.helpers.sun import get_astral_event_date
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from homeassistant.util import dt as dt_util

_CONDITION_SCHEMA = vol.All(
    vol.Schema(
        {
            **cv.CONDITION_BASE_SCHEMA,
            vol.Required(CONF_CONDITION): "sun",
            vol.Optional("before"): cv.sun_event,
            vol.Optional("before_offset"): cv.time_period,
            vol.Optional("after"): vol.All(
                vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
            ),
            vol.Optional("after_offset"): cv.time_period,
        }
    ),
    cv.has_at_least_one_key("before", "after"),
)
_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
    vol.Optional("before"): cv.sun_event,
    vol.Optional("before_offset"): cv.time_period,
    vol.Optional("after"): vol.All(
        vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
    ),
    vol.Optional("after_offset"): cv.time_period,
}

_CONDITION_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_OPTIONS): vol.All(
            _OPTIONS_SCHEMA_DICT,
            cv.has_at_least_one_key("before", "after"),
        )
    }
)


@@ -125,24 +129,36 @@ def sun(
class SunCondition(Condition):
    """Sun condition."""

    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        """Initialize condition."""
        self._config = config
        self._hass = hass
    _options: dict[str, Any]

    @classmethod
    async def async_validate_complete_config(
        cls, hass: HomeAssistant, complete_config: ConfigType
    ) -> ConfigType:
        """Validate complete config."""
        complete_config = move_top_level_schema_fields_to_options(
            complete_config, _OPTIONS_SCHEMA_DICT
        )
        return await super().async_validate_complete_config(hass, complete_config)

    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return _CONDITION_SCHEMA(config)  # type: ignore[no-any-return]
        return cast(ConfigType, _CONDITION_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
        """Initialize condition."""
        assert config.options is not None
        self._options = config.options

    async def async_get_checker(self) -> ConditionCheckerType:
        """Wrap action method with sun based condition."""
        before = self._config.get("before")
        after = self._config.get("after")
        before_offset = self._config.get("before_offset")
        after_offset = self._config.get("after_offset")
        before = self._options.get("before")
        after = self._options.get("after")
        before_offset = self._options.get("before_offset")
        after_offset = self._options.get("after_offset")

        @trace_condition_function
        def sun_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
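
An illustration (my reading of the hunk above, not the helper's actual code) of the config-shape change that move_top_level_schema_fields_to_options handles: legacy top-level fields move under "options", which the new _CONDITION_SCHEMA then validates.

legacy = {
    "condition": "sun",
    "after": "sunset",
    "after_offset": "-01:00:00",
}
migrated = {
    "condition": "sun",
    "options": {
        "after": "sunset",
        "after_offset": "-01:00:00",
    },
}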
|
||||
|
@@ -100,6 +100,7 @@ PLATFORMS_BY_TYPE = {
     SupportedModels.RGBICWW_STRIP_LIGHT.value: [Platform.LIGHT, Platform.SENSOR],
     SupportedModels.PLUG_MINI_EU.value: [Platform.SWITCH, Platform.SENSOR],
     SupportedModels.RELAY_SWITCH_2PM.value: [Platform.SWITCH, Platform.SENSOR],
+    SupportedModels.GARAGE_DOOR_OPENER.value: [Platform.COVER, Platform.SENSOR],
 }
 CLASS_BY_DEVICE = {
     SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
@@ -133,6 +134,7 @@ CLASS_BY_DEVICE = {
     SupportedModels.RGBICWW_STRIP_LIGHT.value: switchbot.SwitchbotRgbicLight,
     SupportedModels.PLUG_MINI_EU.value: switchbot.SwitchbotRelaySwitch,
     SupportedModels.RELAY_SWITCH_2PM.value: switchbot.SwitchbotRelaySwitch2PM,
+    SupportedModels.GARAGE_DOOR_OPENER.value: switchbot.SwitchbotGarageDoorOpener,
 }

@@ -56,6 +56,7 @@ class SupportedModels(StrEnum):
     PLUG_MINI_EU = "plug_mini_eu"
     RELAY_SWITCH_2PM = "relay_switch_2pm"
     K11_PLUS_VACUUM = "k11+_vacuum"
+    GARAGE_DOOR_OPENER = "garage_door_opener"


 CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -91,6 +92,7 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
     SwitchbotModel.PLUG_MINI_EU: SupportedModels.PLUG_MINI_EU,
     SwitchbotModel.RELAY_SWITCH_2PM: SupportedModels.RELAY_SWITCH_2PM,
     SwitchbotModel.K11_VACUUM: SupportedModels.K11_PLUS_VACUUM,
+    SwitchbotModel.GARAGE_DOOR_OPENER: SupportedModels.GARAGE_DOOR_OPENER,
 }

 NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -126,6 +128,7 @@ ENCRYPTED_MODELS = {
     SwitchbotModel.RGBICWW_FLOOR_LAMP,
     SwitchbotModel.PLUG_MINI_EU,
     SwitchbotModel.RELAY_SWITCH_2PM,
+    SwitchbotModel.GARAGE_DOOR_OPENER,
 }

 ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
@@ -146,6 +149,7 @@ ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
     SwitchbotModel.RGBICWW_FLOOR_LAMP: switchbot.SwitchbotRgbicLight,
     SwitchbotModel.PLUG_MINI_EU: switchbot.SwitchbotRelaySwitch,
     SwitchbotModel.RELAY_SWITCH_2PM: switchbot.SwitchbotRelaySwitch2PM,
+    SwitchbotModel.GARAGE_DOOR_OPENER: switchbot.SwitchbotRelaySwitch,
 }

 HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {
@@ -35,7 +35,9 @@ async def async_setup_entry(
 ) -> None:
     """Set up Switchbot curtain based on a config entry."""
     coordinator = entry.runtime_data
-    if isinstance(coordinator.device, switchbot.SwitchbotBlindTilt):
+    if isinstance(coordinator.device, switchbot.SwitchbotGarageDoorOpener):
+        async_add_entities([SwitchbotGarageDoorOpenerEntity(coordinator)])
+    elif isinstance(coordinator.device, switchbot.SwitchbotBlindTilt):
         async_add_entities([SwitchBotBlindTiltEntity(coordinator)])
     elif isinstance(coordinator.device, switchbot.SwitchbotRollerShade):
         async_add_entities([SwitchBotRollerShadeEntity(coordinator)])
@@ -295,3 +297,30 @@ class SwitchBotRollerShadeEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
         self._attr_is_closed = self.parsed_data["position"] <= 20

         self.async_write_ha_state()
+
+
+class SwitchbotGarageDoorOpenerEntity(SwitchbotEntity, CoverEntity):
+    """Representation of a Switchbot garage door."""
+
+    _device: switchbot.SwitchbotGarageDoorOpener
+    _attr_device_class = CoverDeviceClass.GARAGE
+    _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
+    _attr_translation_key = "garage_door"
+    _attr_name = None
+
+    @property
+    def is_closed(self) -> bool | None:
+        """Return true if cover is closed, else False."""
+        return not self._device.door_open()
+
+    @exception_handler
+    async def async_open_cover(self, **kwargs: Any) -> None:
+        """Open the garage door."""
+        await self._device.open()
+        self.async_write_ha_state()
+
+    @exception_handler
+    async def async_close_cover(self, **kwargs: Any) -> None:
+        """Close the garage door."""
+        await self._device.close()
+        self.async_write_ha_state()
@@ -5,7 +5,11 @@ from __future__ import annotations
 from typing import Any

 from homeassistant.components import onboarding
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import (
+    DEFAULT_DISCOVERY_UNIQUE_ID,
+    ConfigFlow,
+    ConfigFlowResult,
+)
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

 from .const import DOMAIN
@@ -18,14 +22,18 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN):

     async def async_step_import(self, import_data: None) -> ConfigFlowResult:
         """Set up by import from async_setup."""
-        await self._async_handle_discovery_without_unique_id()
+        await self.async_set_unique_id(
+            DEFAULT_DISCOVERY_UNIQUE_ID, raise_on_progress=False
+        )
         return self.async_create_entry(title="Thread", data={})

     async def async_step_user(
         self, user_input: dict[str, str] | None = None
     ) -> ConfigFlowResult:
         """Set up by import from async_setup."""
-        await self._async_handle_discovery_without_unique_id()
+        await self.async_set_unique_id(
+            DEFAULT_DISCOVERY_UNIQUE_ID, raise_on_progress=False
+        )
         return self.async_create_entry(title="Thread", data={})

     async def async_step_zeroconf(

@@ -8,5 +8,6 @@
     "integration_type": "service",
     "iot_class": "local_polling",
     "requirements": ["python-otbr-api==2.7.0", "pyroute2==0.7.5"],
+    "single_config_entry": true,
     "zeroconf": ["_meshcop._udp.local."]
 }
@@ -2,6 +2,8 @@

 from __future__ import annotations

+from pyuptimerobot import UptimeRobotMonitor
+
 from homeassistant.components.binary_sensor import (
     BinarySensorDeviceClass,
     BinarySensorEntity,
@@ -12,6 +14,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import UptimeRobotConfigEntry
 from .entity import UptimeRobotEntity
+from .utils import new_device_listener

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -25,28 +28,23 @@ async def async_setup_entry(
     """Set up the UptimeRobot binary_sensors."""
     coordinator = entry.runtime_data

-    known_devices: set[int] = set()
-
-    def _check_device() -> None:
-        current_devices = {monitor.id for monitor in coordinator.data}
-        new_devices = current_devices - known_devices
-        if new_devices:
-            known_devices.update(new_devices)
-            async_add_entities(
-                UptimeRobotBinarySensor(
-                    coordinator,
-                    BinarySensorEntityDescription(
-                        key=str(monitor.id),
-                        device_class=BinarySensorDeviceClass.CONNECTIVITY,
-                    ),
-                    monitor=monitor,
-                )
-                for monitor in coordinator.data
-                if monitor.id in new_devices
-            )
+    def _add_new_entities(new_monitors: list[UptimeRobotMonitor]) -> None:
+        """Add entities for new monitors."""
+        entities = [
+            UptimeRobotBinarySensor(
+                coordinator,
+                BinarySensorEntityDescription(
+                    key=str(monitor.id),
+                    device_class=BinarySensorDeviceClass.CONNECTIVITY,
+                ),
+                monitor=monitor,
+            )
+            for monitor in new_monitors
+        ]
+        if entities:
+            async_add_entities(entities)

-    _check_device()
-    entry.async_on_unload(coordinator.async_add_listener(_check_device))
+    entry.async_on_unload(new_device_listener(coordinator, _add_new_entities))


 class UptimeRobotBinarySensor(UptimeRobotEntity, BinarySensorEntity):
@@ -2,6 +2,8 @@

 from __future__ import annotations

+from pyuptimerobot import UptimeRobotMonitor
+
 from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
@@ -13,6 +15,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import UptimeRobotConfigEntry
 from .entity import UptimeRobotEntity
+from .utils import new_device_listener

 SENSORS_INFO = {
     0: "pause",
@@ -34,37 +37,32 @@ async def async_setup_entry(
     """Set up the UptimeRobot sensors."""
     coordinator = entry.runtime_data

-    known_devices: set[int] = set()
-
-    def _check_device() -> None:
-        current_devices = {monitor.id for monitor in coordinator.data}
-        new_devices = current_devices - known_devices
-        if new_devices:
-            known_devices.update(new_devices)
-            async_add_entities(
-                UptimeRobotSensor(
-                    coordinator,
-                    SensorEntityDescription(
-                        key=str(monitor.id),
-                        entity_category=EntityCategory.DIAGNOSTIC,
-                        device_class=SensorDeviceClass.ENUM,
-                        options=[
-                            "down",
-                            "not_checked_yet",
-                            "pause",
-                            "seems_down",
-                            "up",
-                        ],
-                        translation_key="monitor_status",
-                    ),
-                    monitor=monitor,
-                )
-                for monitor in coordinator.data
-                if monitor.id in new_devices
-            )
+    def _add_new_entities(new_monitors: list[UptimeRobotMonitor]) -> None:
+        """Add entities for new monitors."""
+        entities = [
+            UptimeRobotSensor(
+                coordinator,
+                SensorEntityDescription(
+                    key=str(monitor.id),
+                    entity_category=EntityCategory.DIAGNOSTIC,
+                    device_class=SensorDeviceClass.ENUM,
+                    options=[
+                        "down",
+                        "not_checked_yet",
+                        "pause",
+                        "seems_down",
+                        "up",
+                    ],
+                    translation_key="monitor_status",
+                ),
+                monitor=monitor,
+            )
+            for monitor in new_monitors
+        ]
+        if entities:
+            async_add_entities(entities)

-    _check_device()
-    entry.async_on_unload(coordinator.async_add_listener(_check_device))
+    entry.async_on_unload(new_device_listener(coordinator, _add_new_entities))


 class UptimeRobotSensor(UptimeRobotEntity, SensorEntity):
@@ -4,7 +4,11 @@ from __future__ import annotations

 from typing import Any

-from pyuptimerobot import UptimeRobotAuthenticationException, UptimeRobotException
+from pyuptimerobot import (
+    UptimeRobotAuthenticationException,
+    UptimeRobotException,
+    UptimeRobotMonitor,
+)

 from homeassistant.components.switch import (
     SwitchDeviceClass,
@@ -18,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .const import API_ATTR_OK, DOMAIN
 from .coordinator import UptimeRobotConfigEntry
 from .entity import UptimeRobotEntity
+from .utils import new_device_listener

 # Limit the number of parallel updates to 1
 PARALLEL_UPDATES = 1
@@ -31,28 +36,23 @@ async def async_setup_entry(
     """Set up the UptimeRobot switches."""
     coordinator = entry.runtime_data

-    known_devices: set[int] = set()
-
-    def _check_device() -> None:
-        current_devices = {monitor.id for monitor in coordinator.data}
-        new_devices = current_devices - known_devices
-        if new_devices:
-            known_devices.update(new_devices)
-            async_add_entities(
-                UptimeRobotSwitch(
-                    coordinator,
-                    SwitchEntityDescription(
-                        key=str(monitor.id),
-                        device_class=SwitchDeviceClass.SWITCH,
-                    ),
-                    monitor=monitor,
-                )
-                for monitor in coordinator.data
-                if monitor.id in new_devices
-            )
+    def _add_new_entities(new_monitors: list[UptimeRobotMonitor]) -> None:
+        """Add entities for new monitors."""
+        entities = [
+            UptimeRobotSwitch(
+                coordinator,
+                SwitchEntityDescription(
+                    key=str(monitor.id),
+                    device_class=SwitchDeviceClass.SWITCH,
+                ),
+                monitor=monitor,
+            )
+            for monitor in new_monitors
+        ]
+        if entities:
+            async_add_entities(entities)

-    _check_device()
-    entry.async_on_unload(coordinator.async_add_listener(_check_device))
+    entry.async_on_unload(new_device_listener(coordinator, _add_new_entities))


 class UptimeRobotSwitch(UptimeRobotEntity, SwitchEntity):
homeassistant/components/uptimerobot/utils.py (new file)
@@ -0,0 +1,34 @@
+"""Utility functions for the UptimeRobot integration."""
+
+from collections.abc import Callable
+
+from pyuptimerobot import UptimeRobotMonitor
+
+from .coordinator import UptimeRobotDataUpdateCoordinator
+
+
+def new_device_listener(
+    coordinator: UptimeRobotDataUpdateCoordinator,
+    new_devices_callback: Callable[[list[UptimeRobotMonitor]], None],
+) -> Callable[[], None]:
+    """Subscribe to coordinator updates to check for new devices."""
+    known_devices: set[int] = set()
+
+    def _check_devices() -> None:
+        """Check for new devices and call callback with any new monitors."""
+        if not coordinator.data:
+            return
+
+        new_monitors: list[UptimeRobotMonitor] = []
+        for monitor in coordinator.data:
+            if monitor.id not in known_devices:
+                known_devices.add(monitor.id)
+                new_monitors.append(monitor)
+
+        if new_monitors:
+            new_devices_callback(new_monitors)
+
+    # Check for devices immediately
+    _check_devices()
+
+    return coordinator.async_add_listener(_check_devices)
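Editor's note: for context, a minimal sketch (not part of the diff) of the contract new_device_listener gives its callers, using stand-in objects for the coordinator and UptimeRobotMonitor; type hints are not enforced at runtime, so duck typing suffices here. The callback fires once immediately for the monitors already present, then after each refresh with only the ids it has not seen before.

from dataclasses import dataclass


@dataclass
class StubMonitor:
    """Stand-in for UptimeRobotMonitor; only .id is used by the listener."""

    id: int


class StubCoordinator:
    """Stand-in exposing the two members new_device_listener touches."""

    def __init__(self) -> None:
        self.data: list[StubMonitor] = []
        self._listeners: list = []

    def async_add_listener(self, update_callback):
        # Mirrors DataUpdateCoordinator: returns an unsubscribe callable.
        self._listeners.append(update_callback)
        return lambda: self._listeners.remove(update_callback)

    def refresh(self) -> None:
        for update_callback in list(self._listeners):
            update_callback()


batches: list[list[int]] = []
coordinator = StubCoordinator()
coordinator.data = [StubMonitor(1)]
# Fires immediately with the monitors already present (id 1)...
unsub = new_device_listener(coordinator, lambda ms: batches.append([m.id for m in ms]))
coordinator.data = [StubMonitor(1), StubMonitor(2)]
coordinator.refresh()  # ...and again after a refresh, with only the unseen id 2
assert batches == [[1], [2]]
unsub()  # what entry.async_on_unload invokes when the entry unloads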
@@ -3,13 +3,14 @@
 from __future__ import annotations

 from collections import Counter
-from collections.abc import Callable
+from collections.abc import Callable, Sequence
 from datetime import datetime, timedelta
 from functools import cache
 import logging
 from typing import Any, Literal, cast

 from sqlalchemy import select
+from sqlalchemy.engine.row import Row
 from sqlalchemy.orm import Session

 from homeassistant.components.recorder import get_instance
@@ -38,13 +39,11 @@ ALLOWED_DOMAINS = {
     Platform.ALARM_CONTROL_PANEL,
     Platform.BINARY_SENSOR,
     Platform.BUTTON,
-    Platform.CALENDAR,
     Platform.CAMERA,
     Platform.CLIMATE,
     Platform.COVER,
     Platform.FAN,
     Platform.HUMIDIFIER,
-    Platform.IMAGE,
     Platform.LAWN_MOWER,
     Platform.LIGHT,
     Platform.LOCK,
@@ -55,7 +54,6 @@ ALLOWED_DOMAINS = {
     Platform.SENSOR,
     Platform.SIREN,
     Platform.SWITCH,
-    Platform.TEXT,
     Platform.VACUUM,
     Platform.VALVE,
     Platform.WATER_HEATER,
@@ -93,61 +91,32 @@ async def async_predict_common_control(
     Args:
         hass: Home Assistant instance
         user_id: User ID to filter events by.

     Returns:
         Dictionary with time categories as keys and lists of most common entity IDs as values
     """
     # Get the recorder instance to ensure it's ready
     recorder = get_instance(hass)
     ent_reg = er.async_get(hass)

     # Execute the database operation in the recorder's executor
-    return await recorder.async_add_executor_job(
+    data = await recorder.async_add_executor_job(
         _fetch_with_session, hass, _fetch_and_process_data, ent_reg, user_id
     )

-
-def _fetch_and_process_data(
-    session: Session, ent_reg: er.EntityRegistry, user_id: str
-) -> EntityUsagePredictions:
-    """Fetch and process service call events from the database."""
     # Prepare a dictionary to track results
     results: dict[str, Counter[str]] = {
         time_cat: Counter() for time_cat in TIME_CATEGORIES
     }

+    allowed_entities = set(hass.states.async_entity_ids(ALLOWED_DOMAINS))
+    hidden_entities: set[str] = set()
+
     # Keep track of contexts that we processed so that we will only process
     # the first service call in a context, and not subsequent calls.
     context_processed: set[bytes] = set()
-    thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp()
-    user_id_bytes = uuid_hex_to_bytes_or_none(user_id)
-    if not user_id_bytes:
-        raise ValueError("Invalid user_id format")
-
-    # Build the main query for events with their data
-    query = (
-        select(
-            Events.context_id_bin,
-            Events.time_fired_ts,
-            EventData.shared_data,
-        )
-        .select_from(Events)
-        .outerjoin(EventData, Events.data_id == EventData.data_id)
-        .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
-        .where(Events.time_fired_ts >= thirty_days_ago_ts)
-        .where(Events.context_user_id_bin == user_id_bytes)
-        .where(EventTypes.event_type == "call_service")
-        .order_by(Events.time_fired_ts)
-    )
-
-    # Execute the query
-    context_id: bytes
-    time_fired_ts: float
-    shared_data: str | None
     local_time_zone = dt_util.get_default_time_zone()
-    for context_id, time_fired_ts, shared_data in (
-        session.connection().execute(query).all()
-    ):
+    for context_id, time_fired_ts, shared_data in data:
         # Skip if we have already processed an event that was part of this context
         if context_id in context_processed:
             continue
@@ -156,7 +125,7 @@ def _fetch_and_process_data(
         context_processed.add(context_id)

         # Parse the event data
-        if not shared_data:
+        if not time_fired_ts or not shared_data:
             continue

         try:
@@ -190,27 +159,26 @@ def _fetch_and_process_data(
         if not isinstance(entity_ids, list):
             entity_ids = [entity_ids]

-        # Filter out entity IDs that are not in allowed domains
-        entity_ids = [
-            entity_id
-            for entity_id in entity_ids
-            if entity_id.split(".")[0] in ALLOWED_DOMAINS
-            and ((entry := ent_reg.async_get(entity_id)) is None or not entry.hidden)
-        ]
+        # Convert to local time for time category determination
+        period = time_category(
+            datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
+        )
+        period_results = results[period]

-        if not entity_ids:
-            continue
+        # Count entity usage
+        for entity_id in entity_ids:
+            if entity_id not in allowed_entities or entity_id in hidden_entities:
+                continue

-        # Convert timestamp to datetime and determine time category
-        if time_fired_ts:
-            # Convert to local time for time category determination
-            period = time_category(
-                datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
-            )
+            if (
+                entity_id not in period_results
+                and (entry := ent_reg.async_get(entity_id))
+                and entry.hidden
+            ):
+                hidden_entities.add(entity_id)
+                continue

-            # Count entity usage
-            for entity_id in entity_ids:
-                results[period][entity_id] += 1
+            period_results[entity_id] += 1

     return EntityUsagePredictions(
         morning=[
@@ -229,11 +197,40 @@ def _fetch_and_process_data(
     )


+def _fetch_and_process_data(
+    session: Session, ent_reg: er.EntityRegistry, user_id: str
+) -> Sequence[Row[tuple[bytes | None, float | None, str | None]]]:
+    """Fetch and process service call events from the database."""
+    thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp()
+    user_id_bytes = uuid_hex_to_bytes_or_none(user_id)
+    if not user_id_bytes:
+        raise ValueError("Invalid user_id format")
+
+    # Build the main query for events with their data
+    query = (
+        select(
+            Events.context_id_bin,
+            Events.time_fired_ts,
+            EventData.shared_data,
+        )
+        .select_from(Events)
+        .outerjoin(EventData, Events.data_id == EventData.data_id)
+        .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
+        .where(Events.time_fired_ts >= thirty_days_ago_ts)
+        .where(Events.context_user_id_bin == user_id_bytes)
+        .where(EventTypes.event_type == "call_service")
+        .order_by(Events.time_fired_ts)
+    )
+    return session.connection().execute(query).all()
+
+
 def _fetch_with_session(
     hass: HomeAssistant,
-    fetch_func: Callable[[Session], EntityUsagePredictions],
+    fetch_func: Callable[
+        [Session], Sequence[Row[tuple[bytes | None, float | None, str | None]]]
+    ],
     *args: object,
-) -> EntityUsagePredictions:
+) -> Sequence[Row[tuple[bytes | None, float | None, str | None]]]:
     """Execute a fetch function with a database session."""
     with session_scope(hass=hass, read_only=True) as session:
         return fetch_func(session, *args)
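Editor's note: for intuition, a stripped-down sketch of the Counter-per-time-category bookkeeping the processing loop above performs. The hours, entity ids, and time_category boundaries here are made up for illustration; the real boundaries live in the component.

from collections import Counter

TIME_CATEGORIES = ["morning", "afternoon", "evening", "night"]


def time_category(hour: int) -> str:
    """Stub for illustration only; the component defines the real cutoffs."""
    if 6 <= hour < 12:
        return "morning"
    if 12 <= hour < 18:
        return "afternoon"
    if 18 <= hour < 22:
        return "evening"
    return "night"


results: dict[str, Counter[str]] = {cat: Counter() for cat in TIME_CATEGORIES}
# (hour, entity_id) pairs standing in for decoded call_service events
events = [(7, "light.kitchen"), (7, "light.kitchen"), (20, "light.bedroom")]
for hour, entity_id in events:
    results[time_category(hour)][entity_id] += 1

assert results["morning"].most_common(1) == [("light.kitchen", 2)]
assert results["evening"]["light.bedroom"] == 1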
@@ -141,7 +141,9 @@ class VeSyncFanHA(VeSyncBaseEntity, FanEntity):
             attr["active_time"] = self.device.state.active_time

         if hasattr(self.device.state, "display_status"):
-            attr["display_status"] = self.device.state.display_status.value
+            attr["display_status"] = getattr(
+                self.device.state.display_status, "value", None
+            )

         if hasattr(self.device.state, "child_lock"):
             attr["child_lock"] = self.device.state.child_lock
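Editor's note: the getattr-with-default form guards the case where display_status exists but is not an enum member (for example None), which would make the old attribute access raise. A hypothetical illustration:

from enum import Enum


class DisplayStatus(Enum):
    ON = "on"


# Enum member: .value is returned, as before.
assert getattr(DisplayStatus.ON, "value", None) == "on"
# None (or any object without .value): the old `.value` access would raise
# AttributeError; getattr with a default yields None instead.
assert getattr(None, "value", None) is None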
@@ -354,13 +354,19 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up sensors."""

+    entities: dict[str, VolvoSensor] = {}
     coordinators = entry.runtime_data.interval_coordinators
-    async_add_entities(
-        VolvoSensor(coordinator, description)
-        for coordinator in coordinators
-        for description in _DESCRIPTIONS
-        if description.api_field in coordinator.data
-    )
+
+    for coordinator in coordinators:
+        for description in _DESCRIPTIONS:
+            if description.key in entities:
+                continue
+
+            if description.api_field in coordinator.data:
+                entities[description.key] = VolvoSensor(coordinator, description)
+
+    async_add_entities(entities.values())


 class VolvoSensor(VolvoEntity, SensorEntity):
@@ -155,6 +155,7 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None:
             subdiv=province,
             years=year,
             language=language,
+            categories=[PUBLIC, *user_input.get(CONF_CATEGORY, [])],
         )

     else:
@@ -23,6 +23,7 @@ from homeassistant.components.homeassistant_hardware import silabs_multiprotocol
 from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware
 from homeassistant.config_entries import (
     SOURCE_IGNORE,
+    SOURCE_ZEROCONF,
     ConfigEntry,
     ConfigEntryBaseFlow,
     ConfigEntryState,
@@ -183,27 +184,17 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
         self._hass = hass
         self._radio_mgr.hass = hass

-    async def _get_config_entry_data(self) -> dict:
+    def _get_config_entry_data(self) -> dict[str, Any]:
         """Extract ZHA config entry data from the radio manager."""
         assert self._radio_mgr.radio_type is not None
         assert self._radio_mgr.device_path is not None
         assert self._radio_mgr.device_settings is not None

-        try:
-            device_path = await self.hass.async_add_executor_job(
-                usb.get_serial_by_id, self._radio_mgr.device_path
-            )
-        except OSError as error:
-            raise AbortFlow(
-                reason="cannot_resolve_path",
-                description_placeholders={"path": self._radio_mgr.device_path},
-            ) from error
-
         return {
             CONF_DEVICE: DEVICE_SCHEMA(
                 {
                     **self._radio_mgr.device_settings,
-                    CONF_DEVICE_PATH: device_path,
+                    CONF_DEVICE_PATH: self._radio_mgr.device_path,
                 }
             ),
             CONF_RADIO_TYPE: self._radio_mgr.radio_type.name,
@@ -662,13 +653,8 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
         """Set the flow's unique ID and update the device path in an ignored flow."""
         current_entry = await self.async_set_unique_id(unique_id)

-        if not current_entry:
-            return
-
-        if current_entry.source != SOURCE_IGNORE:
-            self._abort_if_unique_id_configured()
-        else:
-            # Only update the current entry if it is an ignored discovery
+        # Only update the current entry if it is an ignored discovery
+        if current_entry and current_entry.source == SOURCE_IGNORE:
             self._abort_if_unique_id_configured(
                 updates={
                     CONF_DEVICE: {
@@ -703,6 +689,36 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
             DOMAIN, include_ignore=False
         )

+        if self._radio_mgr.device_path is not None:
+            # Ensure the radio manager device path is unique and will match ZHA's
+            try:
+                self._radio_mgr.device_path = await self.hass.async_add_executor_job(
+                    usb.get_serial_by_id, self._radio_mgr.device_path
+                )
+            except OSError as error:
+                raise AbortFlow(
+                    reason="cannot_resolve_path",
+                    description_placeholders={"path": self._radio_mgr.device_path},
+                ) from error
+
+            # mDNS discovery can advertise the same adapter on multiple IPs or via a
+            # hostname, which should be considered a duplicate
+            current_device_paths = {self._radio_mgr.device_path}
+
+            if self.source == SOURCE_ZEROCONF:
+                discovery_info = self.init_data
+                current_device_paths |= {
+                    f"socket://{ip}:{discovery_info.port}"
+                    for ip in discovery_info.ip_addresses
+                }
+
+            for entry in zha_config_entries:
+                path = entry.data.get(CONF_DEVICE, {}).get(CONF_DEVICE_PATH)
+
+                # Abort discovery if the device path is already configured
+                if path is not None and path in current_device_paths:
+                    return self.async_abort(reason="single_instance_allowed")
+
         # Without confirmation, discovery can automatically progress into parts of the
         # config flow logic that interacts with hardware.
         if user_input is not None or (
@@ -873,7 +889,7 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
         zha_config_entries = self.hass.config_entries.async_entries(
             DOMAIN, include_ignore=False
         )
-        data = await self._get_config_entry_data()
+        data = self._get_config_entry_data()

         if len(zha_config_entries) == 1:
             return self.async_update_reload_and_abort(
@@ -976,7 +992,7 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow):
         # Avoid creating both `.options` and `.data` by directly writing `data` here
         self.hass.config_entries.async_update_entry(
             entry=self.config_entry,
-            data=await self._get_config_entry_data(),
+            data=self._get_config_entry_data(),
             options=self.config_entry.options,
         )
@@ -21,7 +21,7 @@
     "zha",
     "universal_silabs_flasher"
   ],
-  "requirements": ["zha==0.0.72"],
+  "requirements": ["zha==0.0.73"],
  "usb": [
     {
       "vid": "10C4",
@@ -2,14 +2,16 @@

 from __future__ import annotations

 from typing import Any, cast

 import voluptuous as vol

 from homeassistant.const import (
     ATTR_GPS_ACCURACY,
     ATTR_LATITUDE,
     ATTR_LONGITUDE,
     CONF_CONDITION,
     CONF_ENTITY_ID,
+    CONF_OPTIONS,
     CONF_ZONE,
     STATE_UNAVAILABLE,
     STATE_UNKNOWN,
@@ -17,26 +19,22 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant, State
 from homeassistant.exceptions import ConditionErrorContainer, ConditionErrorMessage
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
 from homeassistant.helpers.condition import (
     Condition,
     ConditionCheckerType,
+    ConditionConfig,
     trace_condition_function,
 )
 from homeassistant.helpers.typing import ConfigType, TemplateVarsType

 from . import in_zone

-_CONDITION_SCHEMA = vol.Schema(
-    {
-        **cv.CONDITION_BASE_SCHEMA,
-        vol.Required(CONF_CONDITION): "zone",
-        vol.Required(CONF_ENTITY_ID): cv.entity_ids,
-        vol.Required("zone"): cv.entity_ids,
-        # To support use_trigger_value in automation
-        # Deprecated 2016/04/25
-        vol.Optional("event"): vol.Any("enter", "leave"),
-    }
-)
+_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
+    vol.Required(CONF_ENTITY_ID): cv.entity_ids,
+    vol.Required("zone"): cv.entity_ids,
+}
+_CONDITION_SCHEMA = vol.Schema({CONF_OPTIONS: _OPTIONS_SCHEMA_DICT})


 def zone(
@@ -95,21 +93,34 @@ def zone(
 class ZoneCondition(Condition):
     """Zone condition."""

-    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
-        """Initialize condition."""
-        self._config = config
+    _options: dict[str, Any]
+
+    @classmethod
+    async def async_validate_complete_config(
+        cls, hass: HomeAssistant, complete_config: ConfigType
+    ) -> ConfigType:
+        """Validate complete config."""
+        complete_config = move_top_level_schema_fields_to_options(
+            complete_config, _OPTIONS_SCHEMA_DICT
+        )
+        return await super().async_validate_complete_config(hass, complete_config)

     @classmethod
     async def async_validate_config(
         cls, hass: HomeAssistant, config: ConfigType
     ) -> ConfigType:
         """Validate config."""
-        return _CONDITION_SCHEMA(config)  # type: ignore[no-any-return]
+        return cast(ConfigType, _CONDITION_SCHEMA(config))
+
+    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
+        """Initialize condition."""
+        assert config.options is not None
+        self._options = config.options

     async def async_get_checker(self) -> ConditionCheckerType:
         """Wrap action method with zone based condition."""
-        entity_ids = self._config.get(CONF_ENTITY_ID, [])
-        zone_entity_ids = self._config.get(CONF_ZONE, [])
+        entity_ids = self._options.get(CONF_ENTITY_ID, [])
+        zone_entity_ids = self._options.get(CONF_ZONE, [])

         @trace_condition_function
         def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
@@ -376,10 +376,10 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):

         new_addon_config = addon_config | config_updates

-        if not new_addon_config[CONF_ADDON_DEVICE]:
-            new_addon_config.pop(CONF_ADDON_DEVICE)
-        if not new_addon_config[CONF_ADDON_SOCKET]:
-            new_addon_config.pop(CONF_ADDON_SOCKET)
+        if new_addon_config.get(CONF_ADDON_DEVICE) is None:
+            new_addon_config.pop(CONF_ADDON_DEVICE, None)
+        if new_addon_config.get(CONF_ADDON_SOCKET) is None:
+            new_addon_config.pop(CONF_ADDON_SOCKET, None)

         if new_addon_config == addon_config:
             return
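Editor's note: the switch from truthiness to an explicit None check matters in two ways: a falsy-but-present value (such as an empty string) is no longer dropped, and a missing key no longer raises. A small illustration with hypothetical values, not from the PR:

# Old style: `not config[key]` raises KeyError when the key is absent and
# also drops falsy-but-present values such as "".
config = {"device": ""}
if config.get("device") is None:  # new style: only explicit None (or absent) is dropped
    config.pop("device", None)    # pop with a default never raises
assert config == {"device": ""}   # the empty string survives

config = {"device": None}
if config.get("device") is None:
    config.pop("device", None)
assert "device" not in config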
@@ -1470,14 +1470,33 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
         if not is_hassio(self.hass):
             return self.async_abort(reason="not_hassio")

-        if discovery_info.zwave_home_id:
-            await self.async_set_unique_id(str(discovery_info.zwave_home_id))
-            self._abort_if_unique_id_configured(
-                {
-                    CONF_USB_PATH: None,
-                    CONF_SOCKET_PATH: discovery_info.socket_path,
-                }
+        if (
+            discovery_info.zwave_home_id
+            and (
+                current_config_entries := self._async_current_entries(
+                    include_ignore=False
+                )
+            )
+            and (home_id := str(discovery_info.zwave_home_id))
+            and (
+                existing_entry := next(
+                    (
+                        entry
+                        for entry in current_config_entries
+                        if entry.unique_id == home_id
+                    ),
+                    None,
+                )
+            )
+            # Only update existing entries that are configured via sockets
+            and existing_entry.data.get(CONF_SOCKET_PATH)
+        ):
+            await self._async_set_addon_config(
+                {CONF_ADDON_SOCKET: discovery_info.socket_path}
             )
+            # Reloading will sync add-on options to config entry data
+            self.hass.config_entries.async_schedule_reload(existing_entry.entry_id)
+            return self.async_abort(reason="already_configured")

         self.socket_path = discovery_info.socket_path
         self.context["title_placeholders"] = {
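Editor's note: the condition above chains assignment expressions, so each `:=` only binds if the preceding terms were truthy, and `existing_entry` is then available inside the body. A tiny illustration of the pattern with hypothetical values:

items = [{"id": "a", "socket": None}, {"id": "b", "socket": "/tmp/sock"}]
wanted = "b"
if (
    wanted
    and (match := next((item for item in items if item["id"] == wanted), None))
    and match["socket"]
):
    # match is bound here, exactly like existing_entry in the diff
    assert match["socket"] == "/tmp/sock"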
@@ -21,6 +21,7 @@ from homeassistant.const import (
 )
 from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, device_registry as dr
+from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.trigger import (
     Trigger,
@@ -28,7 +29,6 @@ from homeassistant.helpers.trigger import (
     TriggerConfig,
     TriggerData,
     TriggerInfo,
-    move_top_level_schema_fields_to_options,
 )
 from homeassistant.helpers.typing import ConfigType

@@ -20,13 +20,13 @@ from homeassistant.const import (
 )
 from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv, device_registry as dr
+from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.trigger import (
     Trigger,
     TriggerActionType,
     TriggerConfig,
     TriggerInfo,
-    move_top_level_schema_fields_to_options,
 )
 from homeassistant.helpers.typing import ConfigType
@@ -25,7 +25,7 @@ if TYPE_CHECKING:

 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
-MINOR_VERSION: Final = 10
+MINOR_VERSION: Final = 11
 PATCH_VERSION: Final = "0.dev0"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
@@ -6807,7 +6807,8 @@
       "name": "Thread",
       "integration_type": "service",
       "config_flow": true,
-      "iot_class": "local_polling"
+      "iot_class": "local_polling",
+      "single_config_entry": true
     },
     "tibber": {
       "name": "Tibber",
@@ -1,5 +1,13 @@
 """Helpers for automation."""

+from typing import Any
+
+import voluptuous as vol
+
+from homeassistant.const import CONF_OPTIONS
+
+from .typing import ConfigType
+

 def get_absolute_description_key(domain: str, key: str) -> str:
     """Return the absolute description key."""
@@ -19,3 +27,26 @@ def get_relative_description_key(domain: str, key: str) -> str:
     if not subtype:
         return "_"
     return subtype[0]
+
+
+def move_top_level_schema_fields_to_options(
+    config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
+) -> ConfigType:
+    """Move top-level fields to options.
+
+    This function is used to help migrate old-style configs to new-style configs.
+    If options is already present, the config is returned as-is.
+    """
+    if CONF_OPTIONS in config:
+        return config
+
+    config = config.copy()
+    options = config.setdefault(CONF_OPTIONS, {})
+
+    # Move top-level fields to options
+    for key_marked in options_schema_dict:
+        key = key_marked.schema
+        if key in config:
+            options[key] = config.pop(key)
+
+    return config
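Editor's note: a hypothetical before/after showing what this helper does to an old-style config. The schema dict below mirrors the zone condition's _OPTIONS_SCHEMA_DICT from earlier in this diff; the entity ids are made up.

import voluptuous as vol

from homeassistant.helpers.automation import move_top_level_schema_fields_to_options

# Mirrors the zone condition's options schema; only the marker keys matter here.
options_schema_dict = {
    vol.Required("entity_id"): list,
    vol.Required("zone"): list,
}

old_style = {
    "condition": "zone",
    "entity_id": ["device_tracker.phone"],
    "zone": ["zone.home"],
}
migrated = move_top_level_schema_fields_to_options(old_style, options_schema_dict)
# Keys named by the options schema move under "options"; the rest stay put.
assert migrated == {
    "condition": "zone",
    "options": {
        "entity_id": ["device_tracker.phone"],
        "zone": ["zone.home"],
    },
}
# Already-migrated configs pass through unchanged.
assert move_top_level_schema_fields_to_options(migrated, options_schema_dict) == migrated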