Mirror of https://github.com/home-assistant/core.git
Synced 2025-09-27 05:49:27 +00:00

Compare commits: improve-ha ... mqtt-subsc (1 commit)

Commit 79738cfa0d
.github/workflows/ci.yaml (vendored, 62)
@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.11"
+  HA_SHORT_VERSION: "2025.10"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           key: >-
@@ -279,7 +279,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -309,7 +309,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           key: >-
@@ -513,7 +513,7 @@ jobs:
             needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -525,7 +525,7 @@ jobs:
             env.HA_SHORT_VERSION }}-
       - name: Check if apt cache exists
         id: cache-apt-check
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
           path: |
@@ -570,7 +570,7 @@ jobs:
          fi
       - name: Save apt cache
         if: steps.cache-apt-check.outputs.cache-hit != 'true'
-        uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
       - base
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -741,7 +741,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: .mypy_cache
           key: >-
@@ -935,7 +935,7 @@ jobs:
     name: Split tests for full run
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
       Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
       Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
.github/workflows/wheels.yml (vendored, 4)
@@ -160,7 +160,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.09.0
+        uses: home-assistant/wheels@2025.07.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -221,7 +221,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.09.0
+        uses: home-assistant/wheels@2025.07.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.2"]
+  "requirements": ["accuweather==4.2.1"]
 }
@@ -10,7 +10,6 @@ from aioamazondevices.api import AmazonDevice
 from aioamazondevices.const import SENSOR_STATE_OFF

 from homeassistant.components.binary_sensor import (
-    DOMAIN as BINARY_SENSOR_DOMAIN,
     BinarySensorDeviceClass,
     BinarySensorEntity,
     BinarySensorEntityDescription,
@@ -21,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import async_update_unique_id

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -33,7 +31,6 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):

     is_on_fn: Callable[[AmazonDevice, str], bool]
     is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
-    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True


 BINARY_SENSORS: Final = (
@@ -44,15 +41,46 @@ BINARY_SENSORS: Final = (
         is_on_fn=lambda device, _: device.online,
     ),
     AmazonBinarySensorEntityDescription(
-        key="detectionState",
-        device_class=BinarySensorDeviceClass.MOTION,
-        is_on_fn=lambda device, key: bool(
-            device.sensors[key].value != SENSOR_STATE_OFF
-        ),
+        key="bluetooth",
+        entity_category=EntityCategory.DIAGNOSTIC,
+        translation_key="bluetooth",
+        is_on_fn=lambda device, _: device.bluetooth_state,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="babyCryDetectionState",
+        translation_key="baby_cry_detection",
+        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="beepingApplianceDetectionState",
+        translation_key="beeping_appliance_detection",
+        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="coughDetectionState",
+        translation_key="cough_detection",
+        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="dogBarkDetectionState",
+        translation_key="dog_bark_detection",
+        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="humanPresenceDetectionState",
+        device_class=BinarySensorDeviceClass.MOTION,
+        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="waterSoundsDetectionState",
+        translation_key="water_sounds_detection",
+        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_available_fn=lambda device, key: (
+            device.online and device.sensors[key].error is False
+        ),
     ),
 )

@@ -66,22 +94,6 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    # Replace unique id for "detectionState" binary sensor
-    await async_update_unique_id(
-        hass,
-        coordinator,
-        BINARY_SENSOR_DOMAIN,
-        "humanPresenceDetectionState",
-        "detectionState",
-    )
-
-    async_add_entities(
-        AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
-        for sensor_desc in BINARY_SENSORS
-        for serial_num in coordinator.data
-        if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
-    )
-
     known_devices: set[str] = set()

     def _check_device() -> None:
@@ -113,13 +125,3 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
         return self.entity_description.is_on_fn(
             self.device, self.entity_description.key
         )
-
-    @property
-    def available(self) -> bool:
-        """Return if entity is available."""
-        return (
-            self.entity_description.is_available_fn(
-                self.device, self.entity_description.key
-            )
-            and super().available
-        )
@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 data = await validate_input(self.hass, user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except CannotAuthenticate:
+            except (CannotAuthenticate, TypeError):
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except CannotAuthenticate:
+            except (CannotAuthenticate, TypeError):
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
                 translation_key="cannot_retrieve_data_with_error",
                 translation_placeholders={"error": repr(err)},
             ) from err
-        except CannotAuthenticate as err:
+        except (CannotAuthenticate, TypeError) as err:
             raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
@@ -60,5 +60,7 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
         "online": device.online,
         "serial number": device.serial_number,
         "software version": device.software_version,
-        "sensors": device.sensors,
+        "do not disturb": device.do_not_disturb,
+        "response style": device.response_style,
+        "bluetooth state": device.bluetooth_state,
     }
@@ -1,4 +1,44 @@
 {
+  "entity": {
+    "binary_sensor": {
+      "bluetooth": {
+        "default": "mdi:bluetooth-off",
+        "state": {
+          "on": "mdi:bluetooth"
+        }
+      },
+      "baby_cry_detection": {
+        "default": "mdi:account-voice-off",
+        "state": {
+          "on": "mdi:account-voice"
+        }
+      },
+      "beeping_appliance_detection": {
+        "default": "mdi:bell-off",
+        "state": {
+          "on": "mdi:bell-ring"
+        }
+      },
+      "cough_detection": {
+        "default": "mdi:blur-off",
+        "state": {
+          "on": "mdi:blur"
+        }
+      },
+      "dog_bark_detection": {
+        "default": "mdi:dog-side-off",
+        "state": {
+          "on": "mdi:dog-side"
+        }
+      },
+      "water_sounds_detection": {
+        "default": "mdi:water-pump-off",
+        "state": {
+          "on": "mdi:water-pump"
+        }
+      }
+    }
+  },
   "services": {
     "send_sound": {
       "service": "mdi:cast-audio"
@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
-  "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==6.2.6"]
+  "quality_scale": "silver",
+  "requirements": ["aioamazondevices==6.0.0"]
 }
@@ -31,9 +31,6 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
     """Amazon Devices sensor entity description."""

     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
-    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-        device.online and device.sensors[key].error is False
-    )


 SENSORS: Final = (
@@ -102,13 +99,3 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
     def native_value(self) -> StateType:
         """Return the state of the sensor."""
         return self.device.sensors[self.entity_description.key].value
-
-    @property
-    def available(self) -> bool:
-        """Return if entity is available."""
-        return (
-            self.entity_description.is_available_fn(
-                self.device, self.entity_description.key
-            )
-            and super().available
-        )
@@ -58,6 +58,26 @@
       }
     },
     "entity": {
+      "binary_sensor": {
+        "bluetooth": {
+          "name": "Bluetooth"
+        },
+        "baby_cry_detection": {
+          "name": "Baby crying"
+        },
+        "beeping_appliance_detection": {
+          "name": "Beeping appliance"
+        },
+        "cough_detection": {
+          "name": "Coughing"
+        },
+        "dog_bark_detection": {
+          "name": "Dog barking"
+        },
+        "water_sounds_detection": {
+          "name": "Water sounds"
+        }
+      },
       "notify": {
         "speak": {
           "name": "Speak"
@@ -8,17 +8,13 @@ from typing import TYPE_CHECKING, Any, Final

 from aioamazondevices.api import AmazonDevice

-from homeassistant.components.switch import (
-    DOMAIN as SWITCH_DOMAIN,
-    SwitchEntity,
-    SwitchEntityDescription,
-)
+from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import alexa_api_call, async_update_unique_id
+from .utils import alexa_api_call

 PARALLEL_UPDATES = 1

@@ -28,17 +24,16 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
     """Alexa Devices switch entity description."""

     is_on_fn: Callable[[AmazonDevice], bool]
-    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-        device.online and device.sensors[key].error is False
-    )
+    subkey: str
     method: str


 SWITCHES: Final = (
     AmazonSwitchEntityDescription(
-        key="dnd",
+        key="do_not_disturb",
+        subkey="AUDIO_PLAYER",
         translation_key="do_not_disturb",
-        is_on_fn=lambda device: bool(device.sensors["dnd"].value),
+        is_on_fn=lambda _device: _device.do_not_disturb,
         method="set_do_not_disturb",
     ),
 )
@@ -53,11 +48,6 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    # Replace unique id for "DND" switch and remove from Speaker Group
-    await async_update_unique_id(
-        hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
-    )
-
     known_devices: set[str] = set()

     def _check_device() -> None:
@@ -69,7 +59,7 @@ async def async_setup_entry(
             AmazonSwitchEntity(coordinator, serial_num, switch_desc)
             for switch_desc in SWITCHES
             for serial_num in new_devices
-            if switch_desc.key in coordinator.data[serial_num].sensors
+            if switch_desc.subkey in coordinator.data[serial_num].capabilities
         )

     _check_device()
@@ -104,13 +94,3 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
     def is_on(self) -> bool:
         """Return True if switch is on."""
         return self.entity_description.is_on_fn(self.device)
-
-    @property
-    def available(self) -> bool:
-        """Return if entity is available."""
-        return (
-            self.entity_description.is_available_fn(
-                self.device, self.entity_description.key
-            )
-            and super().available
-        )
@@ -6,12 +6,9 @@ from typing import Any, Concatenate

 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

-from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
-import homeassistant.helpers.entity_registry as er

-from .const import _LOGGER, DOMAIN
-from .coordinator import AmazonDevicesCoordinator
+from .const import DOMAIN
 from .entity import AmazonEntity


@@ -41,23 +38,3 @@ def alexa_api_call[_T: AmazonEntity, **_P](
         ) from err

     return cmd_wrapper
-
-
-async def async_update_unique_id(
-    hass: HomeAssistant,
-    coordinator: AmazonDevicesCoordinator,
-    domain: str,
-    old_key: str,
-    new_key: str,
-) -> None:
-    """Update unique id for entities created with old format."""
-    entity_registry = er.async_get(hass)
-
-    for serial_num in coordinator.data:
-        unique_id = f"{serial_num}-{old_key}"
-        if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
-            _LOGGER.debug("Updating unique_id for %s", entity_id)
-            new_unique_id = unique_id.replace(old_key, new_key)
-
-            # Update the registry with the new unique_id
-            entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
@@ -3,12 +3,16 @@ beolink_allstandby:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen

 beolink_expand:
   target:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
   fields:
     all_discovered:
       required: false
@@ -33,6 +37,8 @@ beolink_join:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
   fields:
     jid_options:
       collapsed: false
@@ -65,12 +71,16 @@ beolink_leave:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen

 beolink_unexpand:
   target:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
   fields:
     jid_options:
       collapsed: false
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.1.2"],
+  "requirements": ["hass-nabucasa==1.1.1"],
   "single_config_entry": true
 }
@@ -25,27 +25,23 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
 from .utils import async_client_session

 DEFAULT_HOST = "192.168.1.252"
-DEFAULT_PIN = "111111"
+DEFAULT_PIN = 111111


-pin_regex = r"^[0-9]{4,10}$"
-
 USER_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
         vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
        vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
     }
 )
-STEP_REAUTH_DATA_SCHEMA = vol.Schema(
-    {vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
-)
+STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
 STEP_RECONFIGURE = vol.Schema(
     {
         vol.Required(CONF_HOST): cv.string,
         vol.Required(CONF_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
     }
 )

@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
-  "quality_scale": "platinum",
+  "quality_scale": "silver",
   "requirements": ["aiocomelit==0.12.3"]
 }
@@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol

 import voluptuous as vol

-from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
+from homeassistant.const import CONF_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.condition import (
     Condition,
     ConditionCheckerType,
-    ConditionConfig,
     trace_condition_function,
 )
 from homeassistant.helpers.typing import ConfigType
@@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol):
 class DeviceCondition(Condition):
     """Device condition."""

-    _hass: HomeAssistant
-    _config: ConfigType
-
-    @classmethod
-    async def async_validate_complete_config(
-        cls, hass: HomeAssistant, complete_config: ConfigType
-    ) -> ConfigType:
-        """Validate complete config."""
-        complete_config = await async_validate_device_automation_config(
-            hass,
-            complete_config,
-            cv.DEVICE_CONDITION_SCHEMA,
-            DeviceAutomationType.CONDITION,
-        )
-        # Since we don't want to migrate device conditions to a new format
-        # we just pass the entire config as options.
-        complete_config[CONF_OPTIONS] = complete_config.copy()
-        return complete_config
+    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
+        """Initialize condition."""
+        self._config = config
+        self._hass = hass

     @classmethod
     async def async_validate_config(
         cls, hass: HomeAssistant, config: ConfigType
     ) -> ConfigType:
-        """Validate config.
-
-        This is here just to satisfy the abstract class interface. It is never called.
-        """
-        raise NotImplementedError
-
-    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
-        """Initialize condition."""
-        self._hass = hass
-        assert config.options is not None
-        self._config = config.options
+        """Validate device condition config."""
+        return await async_validate_device_automation_config(
+            hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
+        )

     async def async_get_checker(self) -> condition.ConditionCheckerType:
         """Test a device condition."""
@@ -57,7 +57,6 @@ from .manager import async_replace_device

 ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
 ERROR_INVALID_ENCRYPTION_KEY = "invalid_psk"
-ERROR_INVALID_PASSWORD_AUTH = "invalid_auth"
 _LOGGER = logging.getLogger(__name__)

 ZERO_NOISE_PSK = "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA="
@@ -138,11 +137,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
             self._password = ""
             return await self._async_authenticate_or_add()

-        if error == ERROR_INVALID_PASSWORD_AUTH or (
-            error is None and self._device_info and self._device_info.uses_password
-        ):
-            return await self.async_step_authenticate()
-
         if error is None and entry_data.get(CONF_NOISE_PSK):
             # Device was configured with encryption but now connects without it.
             # Check if it's the same device before offering to remove encryption.
@@ -696,15 +690,13 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
         cli = APIClient(
             host,
             port or DEFAULT_PORT,
-            self._password or "",
+            "",
             zeroconf_instance=zeroconf_instance,
             noise_psk=noise_psk,
         )
         try:
             await cli.connect()
             self._device_info = await cli.device_info()
-        except InvalidAuthAPIError:
-            return ERROR_INVALID_PASSWORD_AUTH
         except RequiresEncryptionAPIError:
             return ERROR_REQUIRES_ENCRYPTION_KEY
         except InvalidEncryptionKeyAPIError as ex:
@@ -372,9 +372,6 @@ class ESPHomeManager:
         """Subscribe to states and list entities on successful API login."""
         try:
             await self._on_connect()
-        except InvalidAuthAPIError as err:
-            _LOGGER.warning("Authentication failed for %s: %s", self.host, err)
-            await self._start_reauth_and_disconnect()
         except APIConnectionError as err:
             _LOGGER.warning(
                 "Error getting setting up connection for %s: %s", self.host, err
@@ -644,14 +641,7 @@ class ESPHomeManager:
             if self.reconnect_logic:
                 await self.reconnect_logic.stop()
             return
-        await self._start_reauth_and_disconnect()
-
-    async def _start_reauth_and_disconnect(self) -> None:
-        """Start reauth flow and stop reconnection attempts."""
         self.entry.async_start_reauth(self.hass)
         await self.cli.disconnect()
         if self.reconnect_logic:
             await self.reconnect_logic.stop()

     async def _handle_dynamic_encryption_key(
         self, device_info: EsphomeDeviceInfo
@@ -1073,7 +1063,7 @@ def _async_register_service(
         service_name,
         {
             "description": (
-                f"Performs the action {service.name} of the node {device_info.name}"
+                f"Calls the service {service.name} of the node {device_info.name}"
             ),
             "fields": fields,
         },
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==41.10.0",
+    "aioesphomeapi==41.9.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.3.0"
   ],
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250925.1"]
+  "requirements": ["home-assistant-frontend==20250924.0"]
 }
@@ -1,10 +1,8 @@
 load_url:
+  target:
+    device:
+      integration: fully_kiosk
   fields:
-    device_id:
-      required: true
-      selector:
-        device:
-          integration: fully_kiosk
     url:
       example: "https://home-assistant.io"
       required: true
@@ -12,12 +10,10 @@ load_url:
       text:

 set_config:
+  target:
+    device:
+      integration: fully_kiosk
   fields:
-    device_id:
-      required: true
-      selector:
-        device:
-          integration: fully_kiosk
     key:
       example: "motionSensitivity"
       required: true
@@ -30,14 +26,12 @@ set_config:
       text:

 start_application:
+  target:
+    device:
+      integration: fully_kiosk
   fields:
     application:
       example: "de.ozerov.fully"
       required: true
       selector:
         text:
-    device_id:
-      required: true
-      selector:
-        device:
-          integration: fully_kiosk
@@ -147,10 +147,6 @@
       "name": "Load URL",
       "description": "Loads a URL on Fully Kiosk Browser.",
       "fields": {
-        "device_id": {
-          "name": "Device ID",
-          "description": "The target device for this action."
-        },
         "url": {
           "name": "[%key:common::config_flow::data::url%]",
           "description": "URL to load."
@@ -161,10 +157,6 @@
       "name": "Set configuration",
       "description": "Sets a configuration parameter on Fully Kiosk Browser.",
       "fields": {
-        "device_id": {
-          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
-          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
-        },
         "key": {
           "name": "Key",
           "description": "Configuration parameter to set."
@@ -182,10 +174,6 @@
         "application": {
           "name": "Application",
           "description": "Package name of the application to start."
-        },
-        "device_id": {
-          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
-          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
         }
       }
     }
@@ -1,5 +1,7 @@
 set_vacation:
   target:
+    device:
+      integration: google_mail
     entity:
       integration: google_mail
   fields:
@@ -22,6 +22,6 @@
   "iot_class": "cloud_push",
   "loggers": ["aiohomeconnect"],
   "quality_scale": "platinum",
-  "requirements": ["aiohomeconnect==0.20.0"],
+  "requirements": ["aiohomeconnect==0.19.0"],
   "zeroconf": ["_homeconnect._tcp.local."]
 }
@@ -32,12 +32,15 @@ set_location:
 stop:
 toggle:
   target:
     entity: {}
+
 turn_on:
   target:
     entity: {}
+
 turn_off:
   target:
     entity: {}
+
 update_entity:
   fields:
@@ -50,6 +53,8 @@ update_entity:
 reload_custom_templates:
 reload_config_entry:
   target:
+    entity: {}
+    device: {}
   fields:
     entry_id:
       advanced: true
@@ -27,12 +27,6 @@
     "install_addon": {
      "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
     },
-    "install_thread_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
-    },
-    "install_zigbee_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
-    },
     "notify_channel_change": {
       "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
       "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -75,10 +69,12 @@
       "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
     },
     "install_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
     },
     "start_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
     },
     "otbr_failed": {
       "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -133,21 +129,14 @@
     },
     "progress": {
       "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
-      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
-      "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
-      "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-      "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
+      "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
+      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
     }
   },
   "config": {
     "flow_title": "{model}",
     "step": {
-      "install_thread_firmware": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
-      },
-      "install_zigbee_firmware": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
-      },
       "pick_firmware": {
         "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
         "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
@@ -169,10 +158,12 @@
       "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
     },
     "install_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
     },
     "start_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
     },
     "otbr_failed": {
       "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -224,10 +215,9 @@
     },
     "progress": {
       "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
-      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
-      "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
-      "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-      "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
+      "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
+      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
     }
   },
   "exceptions": {
@@ -23,16 +23,12 @@
         "description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration."
       },
       "install_otbr_addon": {
-        "title": "Configuring Thread"
-      },
-      "install_thread_firmware": {
-        "title": "Updating adapter"
-      },
-      "install_zigbee_firmware": {
-        "title": "Updating adapter"
+        "title": "Installing OpenThread Border Router add-on",
+        "description": "The OpenThread Border Router (OTBR) add-on is being installed."
       },
       "start_otbr_addon": {
-        "title": "Configuring Thread"
+        "title": "Starting OpenThread Border Router add-on",
+        "description": "The OpenThread Border Router (OTBR) add-on is now starting."
       },
       "otbr_failed": {
         "title": "Failed to set up OpenThread Border Router",
@@ -76,9 +72,7 @@
       "fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
     },
     "progress": {
-      "install_firmware": "Installing {firmware_name} firmware. Do not make any changes to your hardware or software until this finishes.",
-      "install_otbr_addon": "Installing add-on",
-      "start_otbr_addon": "Starting add-on"
+      "install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."
     }
   }
 },
@@ -27,12 +27,6 @@
     "install_addon": {
       "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
     },
-    "install_thread_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
-    },
-    "install_zigbee_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
-    },
     "notify_channel_change": {
       "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
       "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -75,10 +69,12 @@
       "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
     },
     "install_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
     },
     "start_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
     },
     "otbr_failed": {
       "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -133,10 +129,9 @@
     },
     "progress": {
       "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
-      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
-      "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
-      "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-      "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
+      "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
+      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
     }
   },
   "config": {
@@ -163,16 +158,12 @@
       "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
     },
     "install_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
-    },
-    "install_thread_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
-    },
-    "install_zigbee_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
     },
     "start_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
     },
     "otbr_failed": {
       "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -224,10 +215,9 @@
     },
     "progress": {
       "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
-      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
-      "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
-      "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-      "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
+      "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
+      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
     }
   },
   "exceptions": {
@@ -35,12 +35,6 @@
     "install_addon": {
       "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
     },
-    "install_thread_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
-    },
-    "install_zigbee_firmware": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
-    },
     "notify_channel_change": {
       "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
       "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -98,10 +92,12 @@
       "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
     },
     "install_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
     },
     "start_otbr_addon": {
-      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
+      "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
+      "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
     },
     "otbr_failed": {
       "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -158,10 +154,9 @@
     },
     "progress": {
       "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
-      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
-      "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
-      "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-      "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
+      "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
+      "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
     }
   },
   "entity": {
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/libre_hardware_monitor",
   "iot_class": "local_polling",
   "quality_scale": "silver",
-  "requirements": ["librehardwaremonitor-api==1.4.0"]
+  "requirements": ["librehardwaremonitor-api==1.3.1"]
 }
@@ -28,7 +28,7 @@ rules:
   docs-configuration-parameters:
     status: done
     comment: No options to configure
-  docs-installation-parameters: done
+  docs-installation-parameters: todo
   entity-unavailable: todo
   integration-owner: done
   log-when-unavailable: todo
@@ -1,5 +1,7 @@
 set_hold_time:
   target:
+    device:
+      integration: lyric
     entity:
       integration: lyric
       domain: climate
@@ -38,7 +38,10 @@ from homeassistant.core import (
|
||||
get_hassjob_callable_job_type,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.importlib import async_import_module
|
||||
from homeassistant.helpers.start import async_at_started
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -71,6 +74,7 @@ from .const import (
|
||||
DEFAULT_WS_PATH,
|
||||
DOMAIN,
|
||||
MQTT_CONNECTION_STATE,
|
||||
MQTT_PROCESSED_SUBSCRIPTIONS,
|
||||
PROTOCOL_5,
|
||||
PROTOCOL_31,
|
||||
TRANSPORT_WEBSOCKETS,
|
||||
@@ -109,6 +113,7 @@ INITIAL_SUBSCRIBE_COOLDOWN = 0.5
|
||||
SUBSCRIBE_COOLDOWN = 0.1
|
||||
UNSUBSCRIBE_COOLDOWN = 0.1
|
||||
TIMEOUT_ACK = 10
|
||||
SUBSCRIBE_TIMEOUT = 10
|
||||
RECONNECT_INTERVAL_SECONDS = 10
|
||||
|
||||
MAX_WILDCARD_SUBSCRIBES_PER_CALL = 1
|
||||
@@ -191,11 +196,47 @@ async def async_subscribe(
    msg_callback: Callable[[ReceiveMessage], Coroutine[Any, Any, None] | None],
    qos: int = DEFAULT_QOS,
    encoding: str | None = DEFAULT_ENCODING,
    wait: bool = False,
) -> CALLBACK_TYPE:
    """Subscribe to an MQTT topic.

    Call the return value to unsubscribe.
    """
    subscription_complete: asyncio.Future[None]

    async def _sync_mqtt_subscribe(subscriptions: list[tuple[str, int]]) -> None:
        if (topic, qos) not in subscriptions:
            return
        subscription_complete.set_result(None)

    def _async_timeout_subscribe() -> None:
        if not subscription_complete.done():
            subscription_complete.set_exception(TimeoutError)

    if (
        wait
        and DATA_MQTT in hass.data
        and not hass.data[DATA_MQTT].client._matching_subscriptions(topic)  # noqa: SLF001
    ):
        subscription_complete = hass.loop.create_future()
        dispatcher = async_dispatcher_connect(
            hass, MQTT_PROCESSED_SUBSCRIPTIONS, _sync_mqtt_subscribe
        )
        subscribe_callback = async_subscribe_internal(
            hass, topic, msg_callback, qos, encoding
        )
        try:
            hass.loop.call_later(SUBSCRIBE_TIMEOUT, _async_timeout_subscribe)
            await subscription_complete
        except TimeoutError as exc:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="subscribe_timeout",
            ) from exc
        finally:
            dispatcher()
        return subscribe_callback

    return async_subscribe_internal(hass, topic, msg_callback, qos, encoding)
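A minimal usage sketch of the new `wait` flag added above (the caller code below is illustrative, not part of this diff). With wait=True, async_subscribe returns only after the broker has processed the subscription, or raises HomeAssistantError once SUBSCRIBE_TIMEOUT (10 s) elapses.

# Sketch: subscribe and proceed only once the broker has ACKed the subscription.
from homeassistant.components.mqtt import async_subscribe

async def async_watch_status(hass):
    async def message_received(msg):
        # msg is a ReceiveMessage with .topic / .payload / .qos
        hass.states.async_set("sensor.mqtt_demo", msg.payload)

    # wait=True resolves when the (topic, qos) pair shows up in the processed
    # subscriptions dispatched via MQTT_PROCESSED_SUBSCRIPTIONS.
    unsubscribe = await async_subscribe(
        hass, "demo/status", message_received, qos=1, wait=True
    )
    return unsubscribe  # call this later to remove the subscription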
@@ -963,6 +1004,7 @@ class MQTT:
        self._last_subscribe = time.monotonic()

        await self._async_wait_for_mid_or_raise(mid, result)
        async_dispatcher_send(self.hass, MQTT_PROCESSED_SUBSCRIPTIONS, chunk_list)

    async def _async_perform_unsubscribes(self) -> None:
        """Perform pending MQTT client unsubscribes."""

@@ -370,6 +370,7 @@ DOMAIN = "mqtt"
LOGGER = logging.getLogger(__package__)

MQTT_CONNECTION_STATE = "mqtt_connection_state"
MQTT_PROCESSED_SUBSCRIPTIONS = "mqtt_processed_subscriptions"

PAYLOAD_EMPTY_JSON = "{}"
PAYLOAD_NONE = "None"
@@ -2,8 +2,10 @@
  "domain": "mvglive",
  "name": "MVG",
  "codeowners": [],
  "disabled": "This integration is disabled because it uses non-open source code to operate.",
  "documentation": "https://www.home-assistant.io/integrations/mvglive",
  "iot_class": "cloud_polling",
  "loggers": ["MVG"],
  "requirements": ["mvg==1.4.0"]
  "loggers": ["MVGLive"],
  "quality_scale": "legacy",
  "requirements": ["PyMVGLive==1.1.4"]
}
@@ -1,14 +1,13 @@
"""Support for departure information for public transport in Munich."""

# mypy: ignore-errors
from __future__ import annotations

from collections.abc import Mapping
from copy import deepcopy
from datetime import timedelta
import logging
from typing import Any

from mvg import MvgApi, MvgApiError, TransportType
import MVGLive
import voluptuous as vol

from homeassistant.components.sensor import (
@@ -20,7 +19,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util

_LOGGER = logging.getLogger(__name__)

@@ -46,55 +44,53 @@ ICONS = {
    "SEV": "mdi:checkbox-blank-circle-outline",
    "-": "mdi:clock",
}

ATTRIBUTION = "Data provided by mvg.de"
ATTRIBUTION = "Data provided by MVG-live.de"

SCAN_INTERVAL = timedelta(seconds=30)

PLATFORM_SCHEMA = vol.All(
    cv.deprecated(CONF_DIRECTIONS),
    SENSOR_PLATFORM_SCHEMA.extend(
        {
            vol.Required(CONF_NEXT_DEPARTURE): [
                {
                    vol.Required(CONF_STATION): cv.string,
                    vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
                    vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
                    vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
                    vol.Optional(
                        CONF_PRODUCTS, default=DEFAULT_PRODUCT
                    ): cv.ensure_list_csv,
                    vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
                    vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
                    vol.Optional(CONF_NAME): cv.string,
                }
            ]
        }
    ),
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_NEXT_DEPARTURE): [
            {
                vol.Required(CONF_STATION): cv.string,
                vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
                vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
                vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
                vol.Optional(
                    CONF_PRODUCTS, default=DEFAULT_PRODUCT
                ): cv.ensure_list_csv,
                vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
                vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
                vol.Optional(CONF_NAME): cv.string,
            }
        ]
    }
)


async def async_setup_platform(
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the MVGLive sensor."""
    sensors = [
        MVGLiveSensor(
            hass,
            nextdeparture.get(CONF_STATION),
            nextdeparture.get(CONF_DESTINATIONS),
            nextdeparture.get(CONF_LINES),
            nextdeparture.get(CONF_PRODUCTS),
            nextdeparture.get(CONF_TIMEOFFSET),
            nextdeparture.get(CONF_NUMBER),
            nextdeparture.get(CONF_NAME),
        )
        for nextdeparture in config[CONF_NEXT_DEPARTURE]
    ]
    add_entities(sensors, True)
    add_entities(
        (
            MVGLiveSensor(
                nextdeparture.get(CONF_STATION),
                nextdeparture.get(CONF_DESTINATIONS),
                nextdeparture.get(CONF_DIRECTIONS),
                nextdeparture.get(CONF_LINES),
                nextdeparture.get(CONF_PRODUCTS),
                nextdeparture.get(CONF_TIMEOFFSET),
                nextdeparture.get(CONF_NUMBER),
                nextdeparture.get(CONF_NAME),
            )
            for nextdeparture in config[CONF_NEXT_DEPARTURE]
        ),
        True,
    )


class MVGLiveSensor(SensorEntity):
@@ -104,38 +100,38 @@ class MVGLiveSensor(SensorEntity):

    def __init__(
        self,
        hass: HomeAssistant,
        station_name,
        station,
        destinations,
        directions,
        lines,
        products,
        timeoffset,
        number,
        name,
    ) -> None:
    ):
        """Initialize the sensor."""
        self._station = station
        self._name = name
        self._station_name = station_name
        self.data = MVGLiveData(
            hass, station_name, destinations, lines, products, timeoffset, number
            station, destinations, directions, lines, products, timeoffset, number
        )
        self._state = None
        self._icon = ICONS["-"]

    @property
    def name(self) -> str | None:
    def name(self):
        """Return the name of the sensor."""
        if self._name:
            return self._name
        return self._station_name
        return self._station

    @property
    def native_value(self) -> str | None:
    def native_value(self):
        """Return the next departure time."""
        return self._state

    @property
    def extra_state_attributes(self) -> Mapping[str, Any] | None:
    def extra_state_attributes(self):
        """Return the state attributes."""
        if not (dep := self.data.departures):
            return None
@@ -144,114 +140,88 @@ class MVGLiveSensor(SensorEntity):
        return attr

    @property
    def icon(self) -> str | None:
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def native_unit_of_measurement(self) -> str | None:
    def native_unit_of_measurement(self):
        """Return the unit this state is expressed in."""
        return UnitOfTime.MINUTES

    async def async_update(self) -> None:
    def update(self) -> None:
        """Get the latest data and update the state."""
        await self.data.update()
        self.data.update()
        if not self.data.departures:
            self._state = None
            self._state = "-"
            self._icon = ICONS["-"]
        else:
            self._state = self.data.departures[0].get("time_in_mins", "-")
            self._icon = self.data.departures[0].get("icon", ICONS["-"])


def _get_minutes_until_departure(departure_time: int) -> int:
    """Calculate the time difference in minutes between the current time and a given departure time.

    Args:
        departure_time: Unix timestamp of the departure time, in seconds.

    Returns:
        The time difference in minutes, as an integer.

    """
    current_time = dt_util.utcnow()
    departure_datetime = dt_util.utc_from_timestamp(departure_time)
    time_difference = (departure_datetime - current_time).total_seconds()
    return int(time_difference / 60.0)
            self._state = self.data.departures[0].get("time", "-")
            self._icon = ICONS[self.data.departures[0].get("product", "-")]


class MVGLiveData:
    """Pull data from the mvg.de web page."""
    """Pull data from the mvg-live.de web page."""

    def __init__(
        self,
        hass: HomeAssistant,
        station_name,
        destinations,
        lines,
        products,
        timeoffset,
        number,
    ) -> None:
        self, station, destinations, directions, lines, products, timeoffset, number
    ):
        """Initialize the sensor."""
        self._hass = hass
        self._station_name = station_name
        self._station_id = None
        self._station = station
        self._destinations = destinations
        self._directions = directions
        self._lines = lines
        self._products = products
        self._timeoffset = timeoffset
        self._number = number
        self.departures: list[dict[str, Any]] = []
        self._include_ubahn = "U-Bahn" in self._products
        self._include_tram = "Tram" in self._products
        self._include_bus = "Bus" in self._products
        self._include_sbahn = "S-Bahn" in self._products
        self.mvg = MVGLive.MVGLive()
        self.departures = []

    async def update(self):
    def update(self):
        """Update the connection data."""
        if self._station_id is None:
            try:
                station = await MvgApi.station_async(self._station_name)
                self._station_id = station["id"]
            except MvgApiError as err:
                _LOGGER.error(
                    "Failed to resolve station %s: %s", self._station_name, err
                )
                self.departures = []
                return

        try:
            _departures = await MvgApi.departures_async(
                station_id=self._station_id,
                offset=self._timeoffset,
                limit=self._number,
                transport_types=[
                    transport_type
                    for transport_type in TransportType
                    if transport_type.value[0] in self._products
                ]
                if self._products
                else None,
            _departures = self.mvg.getlivedata(
                station=self._station,
                timeoffset=self._timeoffset,
                ubahn=self._include_ubahn,
                tram=self._include_tram,
                bus=self._include_bus,
                sbahn=self._include_sbahn,
            )
        except ValueError:
            self.departures = []
            _LOGGER.warning("Returned data not understood")
            return
        self.departures = []
        for _departure in _departures:
        for i, _departure in enumerate(_departures):
            # find the first departure meeting the criteria
            if (
                "" not in self._destinations[:1]
                and _departure["destination"] not in self._destinations
            ):
                continue

            if "" not in self._lines[:1] and _departure["line"] not in self._lines:
            if (
                "" not in self._directions[:1]
                and _departure["direction"] not in self._directions
            ):
                continue

            time_to_departure = _get_minutes_until_departure(_departure["time"])

            if time_to_departure < self._timeoffset:
            if "" not in self._lines[:1] and _departure["linename"] not in self._lines:
                continue

            if _departure["time"] < self._timeoffset:
                continue

            # now select the relevant data
            _nextdep = {}
            for k in ("destination", "line", "type", "cancelled", "icon"):
            for k in ("destination", "linename", "time", "direction", "product"):
                _nextdep[k] = _departure.get(k, "")
            _nextdep["time_in_mins"] = time_to_departure
            _nextdep["time"] = int(_nextdep["time"])
            self.departures.append(_nextdep)
            if i == self._number - 1:
                break
@@ -131,15 +131,7 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

        # Container ID's are ephemeral, so use the container name for the unique ID
        # The first one, should always be unique, it's fine if users have aliases
        # According to Docker's API docs, the first name is unique
        device_identifier = (
            self._device_info.names[0].replace("/", " ").strip()
            if self._device_info.names
            else None
        )
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"

    @property
    def available(self) -> bool:
@@ -60,7 +60,7 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):

        self._attr_device_info = DeviceInfo(
            identifiers={
                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{self.device_id}")
            },
            manufacturer=DEFAULT_NAME,
            model="Container",
@@ -351,9 +351,13 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
    def _set_current_map(self) -> None:
        if (
            self.roborock_device_info.props.status is not None
            and self.roborock_device_info.props.status.current_map is not None
            and self.roborock_device_info.props.status.map_status is not None
        ):
            self.current_map = self.roborock_device_info.props.status.current_map
            # The map status represents the map flag as flag * 4 + 3 -
            # so we have to invert that in order to get the map flag that we can use to set the current map.
            self.current_map = (
                self.roborock_device_info.props.status.map_status - 3
            ) // 4

    async def set_current_map_rooms(self) -> None:
        """Fetch all of the rooms for the current map and set on RoborockMapInfo."""

@@ -436,7 +440,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
        # If either of these fail, we don't care, and we want to continue.
        await asyncio.gather(*tasks, return_exceptions=True)

        if len(self.maps) > 1:
        if len(self.maps) != 1:
            # Set the map back to the map the user previously had selected so that it
            # does not change the end user's app.
            # Only needs to happen when we changed maps above.
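A quick sanity check of the map_status arithmetic restored above (illustrative values only, no Roborock hardware involved):

# map_status encodes the selected map flag as map_status = flag * 4 + 3,
# so the coordinator inverts it with (map_status - 3) // 4.
for flag in (0, 1, 2):
    map_status = flag * 4 + 3
    assert (map_status - 3) // 4 == flag  # round-trips for every flag
# e.g. map_status == 7 -> (7 - 3) // 4 == 1: the second map is selected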
@@ -7,6 +7,6 @@
  "iot_class": "local_push",
  "loggers": ["aiorussound"],
  "quality_scale": "silver",
  "requirements": ["aiorussound==4.8.2"],
  "requirements": ["aiorussound==4.8.1"],
  "zeroconf": ["_rio._tcp.local."]
}

@@ -30,5 +30,5 @@
  "iot_class": "cloud_push",
  "loggers": ["pysmartthings"],
  "quality_scale": "bronze",
  "requirements": ["pysmartthings==3.3.0"]
  "requirements": ["pysmartthings==3.2.9"]
}
@@ -610,7 +610,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):

    def _play_media_queue(
        self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue
    ) -> None:
    ):
        """Manage adding, replacing, playing items onto the sonos queue."""
        _LOGGER.debug(
            "_play_media_queue item_id [%s] title [%s] enqueue [%s]",

@@ -639,7 +639,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
        media_type: MediaType | str,
        media_id: str,
        enqueue: MediaPlayerEnqueue,
    ) -> None:
    ):
        """Play a directory from a music library share."""
        item = media_browser.get_media(self.media.library, media_id, media_type)
        if not item:

@@ -660,7 +660,6 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
        enqueue: MediaPlayerEnqueue,
        title: str,
    ) -> None:
        """Play a sharelink."""
        share_link = self.coordinator.share_link
        kwargs = {}
        if title:
@@ -24,9 +24,8 @@ restore:

set_sleep_timer:
  target:
    entity:
    device:
      integration: sonos
      domain: media_player
  fields:
    sleep_time:
      selector:

@@ -37,15 +36,13 @@ set_sleep_timer:

clear_sleep_timer:
  target:
    entity:
    device:
      integration: sonos
      domain: media_player

play_queue:
  target:
    entity:
    device:
      integration: sonos
      domain: media_player
  fields:
    queue_position:
      selector:

@@ -56,9 +53,8 @@ play_queue:

remove_from_queue:
  target:
    entity:
    device:
      integration: sonos
      domain: media_player
  fields:
    queue_position:
      selector:

@@ -75,9 +71,8 @@ get_queue:

update_alarm:
  target:
    entity:
    device:
      integration: sonos
      domain: media_player
  fields:
    alarm_id:
      required: true
@@ -1,6 +1,5 @@
"""The Squeezebox integration."""

import asyncio
from asyncio import timeout
from dataclasses import dataclass, field
from datetime import datetime

@@ -32,11 +31,11 @@ from homeassistant.helpers.device_registry import (
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later
from homeassistant.util.hass_dict import HassKey

from .const import (
    CONF_HTTPS,
    DISCOVERY_INTERVAL,
    DISCOVERY_TASK,
    DOMAIN,
    SERVER_MANUFACTURER,
    SERVER_MODEL,

@@ -65,8 +64,6 @@ PLATFORMS = [
    Platform.UPDATE,
]

SQUEEZEBOX_HASS_DATA: HassKey[asyncio.Task] = HassKey(DOMAIN)


@dataclass
class SqueezeboxData:

@@ -243,7 +240,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry)
    current_entries = hass.config_entries.async_entries(DOMAIN)
    if len(current_entries) == 1 and current_entries[0] == entry:
        _LOGGER.debug("Stopping server discovery task")
        hass.data[SQUEEZEBOX_HASS_DATA].cancel()
        hass.data.pop(SQUEEZEBOX_HASS_DATA)
        hass.data[DOMAIN][DISCOVERY_TASK].cancel()
        hass.data[DOMAIN].pop(DISCOVERY_TASK)

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -1,6 +1,7 @@
"""Constants for the Squeezebox component."""

CONF_HTTPS = "https"
DISCOVERY_TASK = "discovery_task"
DOMAIN = "squeezebox"
DEFAULT_PORT = 9000
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"
@@ -44,7 +44,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.start import async_at_start
from homeassistant.util.dt import utcnow

from . import SQUEEZEBOX_HASS_DATA
from .browse_media import (
    BrowseData,
    build_item_response,

@@ -59,6 +58,7 @@ from .const import (
    CONF_VOLUME_STEP,
    DEFAULT_BROWSE_LIMIT,
    DEFAULT_VOLUME_STEP,
    DISCOVERY_TASK,
    DOMAIN,
    SERVER_MANUFACTURER,
    SERVER_MODEL,

@@ -110,10 +110,12 @@ async def start_server_discovery(hass: HomeAssistant) -> None:
            },
        )

    if not hass.data.get(SQUEEZEBOX_HASS_DATA):
    hass.data.setdefault(DOMAIN, {})
    if DISCOVERY_TASK not in hass.data[DOMAIN]:
        _LOGGER.debug("Adding server discovery task for squeezebox")
        hass.data[SQUEEZEBOX_HASS_DATA] = hass.async_create_background_task(
            async_discover(_discovered_server), name="squeezebox server discovery"
        hass.data[DOMAIN][DISCOVERY_TASK] = hass.async_create_background_task(
            async_discover(_discovered_server),
            name="squeezebox server discovery",
        )
@@ -3,18 +3,16 @@
from __future__ import annotations

from datetime import datetime, timedelta
from typing import Any, cast
from typing import cast

import voluptuous as vol

from homeassistant.const import CONF_OPTIONS, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.const import CONF_CONDITION, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.condition import (
    Condition,
    ConditionCheckerType,
    ConditionConfig,
    condition_trace_set_result,
    condition_trace_update_result,
    trace_condition_function,

@@ -23,22 +21,20 @@ from homeassistant.helpers.sun import get_astral_event_date
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from homeassistant.util import dt as dt_util

_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
    vol.Optional("before"): cv.sun_event,
    vol.Optional("before_offset"): cv.time_period,
    vol.Optional("after"): vol.All(
        vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
_CONDITION_SCHEMA = vol.All(
    vol.Schema(
        {
            **cv.CONDITION_BASE_SCHEMA,
            vol.Required(CONF_CONDITION): "sun",
            vol.Optional("before"): cv.sun_event,
            vol.Optional("before_offset"): cv.time_period,
            vol.Optional("after"): vol.All(
                vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
            ),
            vol.Optional("after_offset"): cv.time_period,
        }
    ),
    vol.Optional("after_offset"): cv.time_period,
}

_CONDITION_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_OPTIONS): vol.All(
            _OPTIONS_SCHEMA_DICT,
            cv.has_at_least_one_key("before", "after"),
        )
    }
    cv.has_at_least_one_key("before", "after"),
)


@@ -129,36 +125,24 @@ def sun(
class SunCondition(Condition):
    """Sun condition."""

    _options: dict[str, Any]

    @classmethod
    async def async_validate_complete_config(
        cls, hass: HomeAssistant, complete_config: ConfigType
    ) -> ConfigType:
        """Validate complete config."""
        complete_config = move_top_level_schema_fields_to_options(
            complete_config, _OPTIONS_SCHEMA_DICT
        )
        return await super().async_validate_complete_config(hass, complete_config)
    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        """Initialize condition."""
        self._config = config
        self._hass = hass

    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, _CONDITION_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
        """Initialize condition."""
        assert config.options is not None
        self._options = config.options
        return _CONDITION_SCHEMA(config)  # type: ignore[no-any-return]

    async def async_get_checker(self) -> ConditionCheckerType:
        """Wrap action method with sun based condition."""
        before = self._options.get("before")
        after = self._options.get("after")
        before_offset = self._options.get("before_offset")
        after_offset = self._options.get("after_offset")
        before = self._config.get("before")
        after = self._config.get("after")
        before_offset = self._config.get("before_offset")
        after_offset = self._config.get("after_offset")

        @trace_condition_function
        def sun_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
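For orientation, the flat old-style shape that the restored sun _CONDITION_SCHEMA accepts looks like this (a hypothetical config, validated outside a running instance):

# Sketch: the schema keeps the condition type inline and requires at least
# one of "before"/"after"; offsets are parsed by cv.time_period.
validated = _CONDITION_SCHEMA(
    {
        "condition": "sun",
        "after": "sunset",
        "after_offset": "-01:00",  # one hour before sunset
    }
)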
@@ -28,12 +28,11 @@ async def async_setup_entry(
    known_devices: set[int] = set()

    def _check_device() -> None:
        entities: list[UptimeRobotBinarySensor] = []
        for monitor in coordinator.data:
            if monitor.id in known_devices:
                continue
            known_devices.add(monitor.id)
            entities.append(
        current_devices = {monitor.id for monitor in coordinator.data}
        new_devices = current_devices - known_devices
        if new_devices:
            known_devices.update(new_devices)
            async_add_entities(
                UptimeRobotBinarySensor(
                    coordinator,
                    BinarySensorEntityDescription(
@@ -42,9 +41,9 @@ async def async_setup_entry(
                    ),
                    monitor=monitor,
                )
                for monitor in coordinator.data
                if monitor.id in new_devices
            )
        if entities:
            async_add_entities(entities)

    _check_device()
    entry.async_on_unload(coordinator.async_add_listener(_check_device))
@@ -3,14 +3,13 @@
from __future__ import annotations

from collections import Counter
from collections.abc import Callable, Sequence
from collections.abc import Callable
from datetime import datetime, timedelta
from functools import cache
import logging
from typing import Any, Literal, cast

from sqlalchemy import select
from sqlalchemy.engine.row import Row
from sqlalchemy.orm import Session

from homeassistant.components.recorder import get_instance

@@ -39,11 +38,13 @@ ALLOWED_DOMAINS = {
    Platform.ALARM_CONTROL_PANEL,
    Platform.BINARY_SENSOR,
    Platform.BUTTON,
    Platform.CALENDAR,
    Platform.CAMERA,
    Platform.CLIMATE,
    Platform.COVER,
    Platform.FAN,
    Platform.HUMIDIFIER,
    Platform.IMAGE,
    Platform.LAWN_MOWER,
    Platform.LIGHT,
    Platform.LOCK,

@@ -54,6 +55,7 @@ ALLOWED_DOMAINS = {
    Platform.SENSOR,
    Platform.SIREN,
    Platform.SWITCH,
    Platform.TEXT,
    Platform.VACUUM,
    Platform.VALVE,
    Platform.WATER_HEATER,
@@ -91,32 +93,61 @@ async def async_predict_common_control(
    Args:
        hass: Home Assistant instance
        user_id: User ID to filter events by.

    Returns:
        Dictionary with time categories as keys and lists of most common entity IDs as values
    """
    # Get the recorder instance to ensure it's ready
    recorder = get_instance(hass)
    ent_reg = er.async_get(hass)

    # Execute the database operation in the recorder's executor
    data = await recorder.async_add_executor_job(
    return await recorder.async_add_executor_job(
        _fetch_with_session, hass, _fetch_and_process_data, ent_reg, user_id
    )


def _fetch_and_process_data(
    session: Session, ent_reg: er.EntityRegistry, user_id: str
) -> EntityUsagePredictions:
    """Fetch and process service call events from the database."""
    # Prepare a dictionary to track results
    results: dict[str, Counter[str]] = {
        time_cat: Counter() for time_cat in TIME_CATEGORIES
    }

    allowed_entities = set(hass.states.async_entity_ids(ALLOWED_DOMAINS))
    hidden_entities: set[str] = set()

    # Keep track of contexts that we processed so that we will only process
    # the first service call in a context, and not subsequent calls.
    context_processed: set[bytes] = set()
    thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp()
    user_id_bytes = uuid_hex_to_bytes_or_none(user_id)
    if not user_id_bytes:
        raise ValueError("Invalid user_id format")

    # Build the main query for events with their data
    query = (
        select(
            Events.context_id_bin,
            Events.time_fired_ts,
            EventData.shared_data,
        )
        .select_from(Events)
        .outerjoin(EventData, Events.data_id == EventData.data_id)
        .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
        .where(Events.time_fired_ts >= thirty_days_ago_ts)
        .where(Events.context_user_id_bin == user_id_bytes)
        .where(EventTypes.event_type == "call_service")
        .order_by(Events.time_fired_ts)
    )

    # Execute the query
    context_id: bytes
    time_fired_ts: float
    shared_data: str | None
    local_time_zone = dt_util.get_default_time_zone()
    for context_id, time_fired_ts, shared_data in data:
    for context_id, time_fired_ts, shared_data in (
        session.connection().execute(query).all()
    ):
        # Skip if we have already processed an event that was part of this context
        if context_id in context_processed:
            continue

@@ -125,7 +156,7 @@ async def async_predict_common_control(
        context_processed.add(context_id)

        # Parse the event data
        if not time_fired_ts or not shared_data:
        if not shared_data:
            continue

        try:

@@ -159,26 +190,27 @@ async def async_predict_common_control(
        if not isinstance(entity_ids, list):
            entity_ids = [entity_ids]

        # Convert to local time for time category determination
        period = time_category(
            datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
        )
        period_results = results[period]
        # Filter out entity IDs that are not in allowed domains
        entity_ids = [
            entity_id
            for entity_id in entity_ids
            if entity_id.split(".")[0] in ALLOWED_DOMAINS
            and ((entry := ent_reg.async_get(entity_id)) is None or not entry.hidden)
        ]

        # Count entity usage
        for entity_id in entity_ids:
            if entity_id not in allowed_entities or entity_id in hidden_entities:
                continue
        if not entity_ids:
            continue

            if (
                entity_id not in period_results
                and (entry := ent_reg.async_get(entity_id))
                and entry.hidden
            ):
                hidden_entities.add(entity_id)
                continue
        # Convert timestamp to datetime and determine time category
        if time_fired_ts:
            # Convert to local time for time category determination
            period = time_category(
                datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
            )

            period_results[entity_id] += 1
            # Count entity usage
            for entity_id in entity_ids:
                results[period][entity_id] += 1

    return EntityUsagePredictions(
        morning=[

@@ -197,40 +229,11 @@ async def async_predict_common_control(
    )


def _fetch_and_process_data(
    session: Session, ent_reg: er.EntityRegistry, user_id: str
) -> Sequence[Row[tuple[bytes | None, float | None, str | None]]]:
    """Fetch and process service call events from the database."""
    thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp()
    user_id_bytes = uuid_hex_to_bytes_or_none(user_id)
    if not user_id_bytes:
        raise ValueError("Invalid user_id format")

    # Build the main query for events with their data
    query = (
        select(
            Events.context_id_bin,
            Events.time_fired_ts,
            EventData.shared_data,
        )
        .select_from(Events)
        .outerjoin(EventData, Events.data_id == EventData.data_id)
        .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
        .where(Events.time_fired_ts >= thirty_days_ago_ts)
        .where(Events.context_user_id_bin == user_id_bytes)
        .where(EventTypes.event_type == "call_service")
        .order_by(Events.time_fired_ts)
    )
    return session.connection().execute(query).all()


def _fetch_with_session(
    hass: HomeAssistant,
    fetch_func: Callable[
        [Session], Sequence[Row[tuple[bytes | None, float | None, str | None]]]
    ],
    fetch_func: Callable[[Session], EntityUsagePredictions],
    *args: object,
) -> Sequence[Row[tuple[bytes | None, float | None, str | None]]]:
) -> EntityUsagePredictions:
    """Execute a fetch function with a database session."""
    with session_scope(hass=hass, read_only=True) as session:
        return fetch_func(session, *args)
@@ -23,7 +23,6 @@ from homeassistant.components.homeassistant_hardware import silabs_multiprotocol
from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_ZEROCONF,
    ConfigEntry,
    ConfigEntryBaseFlow,
    ConfigEntryState,

@@ -184,17 +183,27 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
        self._hass = hass
        self._radio_mgr.hass = hass

    def _get_config_entry_data(self) -> dict[str, Any]:
    async def _get_config_entry_data(self) -> dict:
        """Extract ZHA config entry data from the radio manager."""
        assert self._radio_mgr.radio_type is not None
        assert self._radio_mgr.device_path is not None
        assert self._radio_mgr.device_settings is not None

        try:
            device_path = await self.hass.async_add_executor_job(
                usb.get_serial_by_id, self._radio_mgr.device_path
            )
        except OSError as error:
            raise AbortFlow(
                reason="cannot_resolve_path",
                description_placeholders={"path": self._radio_mgr.device_path},
            ) from error

        return {
            CONF_DEVICE: DEVICE_SCHEMA(
                {
                    **self._radio_mgr.device_settings,
                    CONF_DEVICE_PATH: self._radio_mgr.device_path,
                    CONF_DEVICE_PATH: device_path,
                }
            ),
            CONF_RADIO_TYPE: self._radio_mgr.radio_type.name,

@@ -653,8 +662,13 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
        """Set the flow's unique ID and update the device path in an ignored flow."""
        current_entry = await self.async_set_unique_id(unique_id)

        # Only update the current entry if it is an ignored discovery
        if current_entry and current_entry.source == SOURCE_IGNORE:
        if not current_entry:
            return

        if current_entry.source != SOURCE_IGNORE:
            self._abort_if_unique_id_configured()
        else:
            # Only update the current entry if it is an ignored discovery
            self._abort_if_unique_id_configured(
                updates={
                    CONF_DEVICE: {

@@ -689,36 +703,6 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
            DOMAIN, include_ignore=False
        )

        if self._radio_mgr.device_path is not None:
            # Ensure the radio manager device path is unique and will match ZHA's
            try:
                self._radio_mgr.device_path = await self.hass.async_add_executor_job(
                    usb.get_serial_by_id, self._radio_mgr.device_path
                )
            except OSError as error:
                raise AbortFlow(
                    reason="cannot_resolve_path",
                    description_placeholders={"path": self._radio_mgr.device_path},
                ) from error

            # mDNS discovery can advertise the same adapter on multiple IPs or via a
            # hostname, which should be considered a duplicate
            current_device_paths = {self._radio_mgr.device_path}

            if self.source == SOURCE_ZEROCONF:
                discovery_info = self.init_data
                current_device_paths |= {
                    f"socket://{ip}:{discovery_info.port}"
                    for ip in discovery_info.ip_addresses
                }

            for entry in zha_config_entries:
                path = entry.data.get(CONF_DEVICE, {}).get(CONF_DEVICE_PATH)

                # Abort discovery if the device path is already configured
                if path is not None and path in current_device_paths:
                    return self.async_abort(reason="single_instance_allowed")

        # Without confirmation, discovery can automatically progress into parts of the
        # config flow logic that interacts with hardware.
        if user_input is not None or (

@@ -889,7 +873,7 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
        zha_config_entries = self.hass.config_entries.async_entries(
            DOMAIN, include_ignore=False
        )
        data = self._get_config_entry_data()
        data = await self._get_config_entry_data()

        if len(zha_config_entries) == 1:
            return self.async_update_reload_and_abort(

@@ -992,7 +976,7 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow):
        # Avoid creating both `.options` and `.data` by directly writing `data` here
        self.hass.config_entries.async_update_entry(
            entry=self.config_entry,
            data=self._get_config_entry_data(),
            data=await self._get_config_entry_data(),
            options=self.config_entry.options,
        )
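Background on the path handling above: usb.get_serial_by_id maps a raw tty device to its stable /dev/serial/by-id alias when one exists, which is why both sides of this diff funnel discovery paths through it. A hedged illustration (the concrete paths are hypothetical):

from homeassistant.components import usb

path = usb.get_serial_by_id("/dev/ttyUSB0")
# -> "/dev/serial/by-id/usb-ITEAD_SONOFF_Zigbee_...-if00-port0" when the
#    symlink exists; otherwise the original "/dev/ttyUSB0" is returned as-is.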
@@ -21,7 +21,7 @@
    "zha",
    "universal_silabs_flasher"
  ],
  "requirements": ["zha==0.0.73"],
  "requirements": ["zha==0.0.72"],
  "usb": [
    {
      "vid": "10C4",
@@ -2,16 +2,14 @@

from __future__ import annotations

from typing import Any, cast

import voluptuous as vol

from homeassistant.const import (
    ATTR_GPS_ACCURACY,
    ATTR_LATITUDE,
    ATTR_LONGITUDE,
    CONF_CONDITION,
    CONF_ENTITY_ID,
    CONF_OPTIONS,
    CONF_ZONE,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,

@@ -19,22 +17,26 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, State
from homeassistant.exceptions import ConditionErrorContainer, ConditionErrorMessage
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.condition import (
    Condition,
    ConditionCheckerType,
    ConditionConfig,
    trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType, TemplateVarsType

from . import in_zone

_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
    vol.Required(CONF_ENTITY_ID): cv.entity_ids,
    vol.Required("zone"): cv.entity_ids,
}
_CONDITION_SCHEMA = vol.Schema({CONF_OPTIONS: _OPTIONS_SCHEMA_DICT})
_CONDITION_SCHEMA = vol.Schema(
    {
        **cv.CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): "zone",
        vol.Required(CONF_ENTITY_ID): cv.entity_ids,
        vol.Required("zone"): cv.entity_ids,
        # To support use_trigger_value in automation
        # Deprecated 2016/04/25
        vol.Optional("event"): vol.Any("enter", "leave"),
    }
)


def zone(
@@ -93,34 +95,21 @@ def zone(
class ZoneCondition(Condition):
    """Zone condition."""

    _options: dict[str, Any]

    @classmethod
    async def async_validate_complete_config(
        cls, hass: HomeAssistant, complete_config: ConfigType
    ) -> ConfigType:
        """Validate complete config."""
        complete_config = move_top_level_schema_fields_to_options(
            complete_config, _OPTIONS_SCHEMA_DICT
        )
        return await super().async_validate_complete_config(hass, complete_config)
    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        """Initialize condition."""
        self._config = config

    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, _CONDITION_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
        """Initialize condition."""
        assert config.options is not None
        self._options = config.options
        return _CONDITION_SCHEMA(config)  # type: ignore[no-any-return]

    async def async_get_checker(self) -> ConditionCheckerType:
        """Wrap action method with zone based condition."""
        entity_ids = self._options.get(CONF_ENTITY_ID, [])
        zone_entity_ids = self._options.get(CONF_ZONE, [])
        entity_ids = self._config.get(CONF_ENTITY_ID, [])
        zone_entity_ids = self._config.get(CONF_ZONE, [])

        @trace_condition_function
        def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
@@ -21,7 +21,6 @@ from homeassistant.const import (
)
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.trigger import (
    Trigger,

@@ -29,6 +28,7 @@ from homeassistant.helpers.trigger import (
    TriggerConfig,
    TriggerData,
    TriggerInfo,
    move_top_level_schema_fields_to_options,
)
from homeassistant.helpers.typing import ConfigType

@@ -20,13 +20,13 @@ from homeassistant.const import (
)
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.trigger import (
    Trigger,
    TriggerActionType,
    TriggerConfig,
    TriggerInfo,
    move_top_level_schema_fields_to_options,
)
from homeassistant.helpers.typing import ConfigType
@@ -25,7 +25,7 @@ if TYPE_CHECKING:

APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2025
MINOR_VERSION: Final = 11
MINOR_VERSION: Final = 10
PATCH_VERSION: Final = "0.dev0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
@@ -1,13 +1,5 @@
"""Helpers for automation."""

from typing import Any

import voluptuous as vol

from homeassistant.const import CONF_OPTIONS

from .typing import ConfigType


def get_absolute_description_key(domain: str, key: str) -> str:
    """Return the absolute description key."""

@@ -27,26 +19,3 @@ def get_relative_description_key(domain: str, key: str) -> str:
    if not subtype:
        return "_"
    return subtype[0]


def move_top_level_schema_fields_to_options(
    config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
) -> ConfigType:
    """Move top-level fields to options.

    This function is used to help migrating old-style configs to new-style configs.
    If options is already present, the config is returned as-is.
    """
    if CONF_OPTIONS in config:
        return config

    config = config.copy()
    options = config.setdefault(CONF_OPTIONS, {})

    # Move top-level fields to options
    for key_marked in options_schema_dict:
        key = key_marked.schema
        if key in config:
            options[key] = config.pop(key)

    return config
@@ -6,7 +6,6 @@ import abc
from collections import deque
from collections.abc import Callable, Container, Coroutine, Generator, Iterable
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime, time as dt_time, timedelta
import functools as ft
import inspect

@@ -31,10 +30,8 @@ from homeassistant.const import (
    CONF_FOR,
    CONF_ID,
    CONF_MATCH,
    CONF_OPTIONS,
    CONF_SELECTOR,
    CONF_STATE,
    CONF_TARGET,
    CONF_VALUE_TEMPLATE,
    CONF_WEEKDAY,
    ENTITY_MATCH_ALL,

@@ -114,17 +111,17 @@ CONDITIONS: HassKey[dict[str, str]] = HassKey("conditions")

# Basic schemas to sanity check the condition descriptions,
# full validation is done by hassfest.conditions
_FIELD_DESCRIPTION_SCHEMA = vol.Schema(
_FIELD_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_SELECTOR): selector.validate_selector,
    },
    extra=vol.ALLOW_EXTRA,
)

_CONDITION_DESCRIPTION_SCHEMA = vol.Schema(
_CONDITION_SCHEMA = vol.Schema(
    {
        vol.Optional("target"): TargetSelector.CONFIG_SCHEMA,
        vol.Optional("fields"): vol.Schema({str: _FIELD_DESCRIPTION_SCHEMA}),
        vol.Optional("fields"): vol.Schema({str: _FIELD_SCHEMA}),
    },
    extra=vol.ALLOW_EXTRA,
)

@@ -137,10 +134,10 @@ def starts_with_dot(key: str) -> str:
    return key


_CONDITIONS_DESCRIPTION_SCHEMA = vol.Schema(
_CONDITIONS_SCHEMA = vol.Schema(
    {
        vol.Remove(vol.All(str, starts_with_dot)): object,
        cv.underscore_slug: vol.Any(None, _CONDITION_DESCRIPTION_SCHEMA),
        cv.underscore_slug: vol.Any(None, _CONDITION_SCHEMA),
    }
)

@@ -202,43 +199,11 @@ async def _register_condition_platform(
            _LOGGER.exception("Error while notifying condition platform listener")


_CONDITION_SCHEMA = vol.Schema(
    {
        **cv.CONDITION_BASE_SCHEMA,
        vol.Required(CONF_CONDITION): str,
        vol.Optional(CONF_OPTIONS): object,
        vol.Optional(CONF_TARGET): cv.TARGET_FIELDS,
    }
)


class Condition(abc.ABC):
    """Condition class."""

    @classmethod
    async def async_validate_complete_config(
        cls, hass: HomeAssistant, complete_config: ConfigType
    ) -> ConfigType:
        """Validate complete config.

        The complete config includes fields that are generic to all conditions,
        such as the alias.
        This method should be overridden by conditions that need to migrate
        from the old-style config.
        """
        complete_config = _CONDITION_SCHEMA(complete_config)

        specific_config: ConfigType = {}
        for key in (CONF_OPTIONS, CONF_TARGET):
            if key in complete_config:
                specific_config[key] = complete_config.pop(key)
        specific_config = await cls.async_validate_config(hass, specific_config)

        for key in (CONF_OPTIONS, CONF_TARGET):
            if key in specific_config:
                complete_config[key] = specific_config[key]

        return complete_config
    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
        """Initialize condition."""

    @classmethod
    @abc.abstractmethod
@@ -247,9 +212,6 @@ class Condition(abc.ABC):
    ) -> ConfigType:
        """Validate config."""

    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
        """Initialize condition."""

    @abc.abstractmethod
    async def async_get_checker(self) -> ConditionCheckerType:
        """Get the condition checker."""

@@ -264,14 +226,6 @@ class ConditionProtocol(Protocol):
        """Return the conditions provided by this integration."""


@dataclass(slots=True)
class ConditionConfig:
    """Condition config."""

    options: dict[str, Any] | None = None
    target: dict[str, Any] | None = None


type ConditionCheckerType = Callable[[HomeAssistant, TemplateVarsType], bool | None]


@@ -401,15 +355,8 @@ async def async_from_config(
    relative_condition_key = get_relative_description_key(
        platform_domain, condition_key
    )
    condition_cls = condition_descriptors[relative_condition_key]
    condition = condition_cls(
        hass,
        ConditionConfig(
            options=config.get(CONF_OPTIONS),
            target=config.get(CONF_TARGET),
        ),
    )
    return await condition.async_get_checker()
    condition_instance = condition_descriptors[relative_condition_key](hass, config)
    return await condition_instance.async_get_checker()

    for fmt in (ASYNC_FROM_CONFIG_FORMAT, FROM_CONFIG_FORMAT):
        factory = getattr(sys.modules[__name__], fmt.format(condition_key), None)

@@ -1042,9 +989,9 @@ async def async_validate_condition_config(
    )
    if not (condition_class := condition_descriptors.get(relative_condition_key)):
        raise vol.Invalid(f"Invalid condition '{condition_key}' specified")
    return await condition_class.async_validate_complete_config(hass, config)
    return await condition_class.async_validate_config(hass, config)

    if condition_key in ("numeric_state", "state"):
    if platform is None and condition_key in ("numeric_state", "state"):
        validator = cast(
            Callable[[HomeAssistant, ConfigType], ConfigType],
            getattr(

@@ -1164,7 +1111,7 @@ def _load_conditions_file(integration: Integration) -> dict[str, Any]:
    try:
        return cast(
            dict[str, Any],
            _CONDITIONS_DESCRIPTION_SCHEMA(
            _CONDITIONS_SCHEMA(
                load_yaml_dict(str(integration.file_path / "conditions.yaml"))
            ),
        )
@@ -1545,6 +1545,9 @@ STATE_CONDITION_BASE_SCHEMA = {
    ),
    vol.Optional(CONF_ATTRIBUTE): str,
    vol.Optional(CONF_FOR): positive_time_period_template,
    # To support use_trigger_value in automation
    # Deprecated 2016/04/25
    vol.Optional("from"): str,
}

STATE_CONDITION_STATE_SCHEMA = vol.Schema(
@@ -401,6 +401,29 @@ class PluggableAction:
            await task


def move_top_level_schema_fields_to_options(
    config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
) -> ConfigType:
    """Move top-level fields to options.

    This function is used to help migrating old-style configs to new-style configs.
    If options is already present, the config is returned as-is.
    """
    if CONF_OPTIONS in config:
        return config

    config = config.copy()
    options = config.setdefault(CONF_OPTIONS, {})

    # Move top-level fields to options
    for key_marked in options_schema_dict:
        key = key_marked.schema
        if key in config:
            options[key] = config.pop(key)

    return config


async def _async_get_trigger_platform(
    hass: HomeAssistant, trigger_key: str
) -> tuple[str, TriggerProtocol]:
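To make the migration concrete, a small sketch of what the helper above does to an old-style config (the trigger fields are hypothetical; CONF_OPTIONS is the literal key "options"):

import voluptuous as vol

# Hypothetical schema dict mirroring what a trigger platform would pass in.
options_schema_dict = {vol.Required("event_type"): str, vol.Optional("zone"): str}

old_style = {"platform": "demo", "event_type": "press", "alias": "My trigger"}
migrated = move_top_level_schema_fields_to_options(old_style, options_schema_dict)
# Only keys named in the schema move; generic fields stay at the top level:
# {"platform": "demo", "alias": "My trigger", "options": {"event_type": "press"}}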
@@ -121,9 +121,6 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = {
    "variable": BlockedIntegration(
        AwesomeVersion("3.4.4"), "prevents recorder from working"
    ),
    # Added in 2025.10.0 because of
    # https://github.com/frenck/spook/issues/1066
    "spook": BlockedIntegration(AwesomeVersion("4.0.0"), "breaks the template engine"),
}

DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey(
@@ -36,10 +36,10 @@ fnv-hash-fast==1.5.0
go2rtc-client==0.2.1
ha-ffmpeg==3.2.2
habluetooth==5.6.4
hass-nabucasa==1.1.2
hass-nabucasa==1.1.1
hassil==3.2.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20250925.1
home-assistant-frontend==20250924.0
home-assistant-intents==2025.9.24
httpx==0.28.1
ifaddr==0.2.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2025.11.0.dev0"
version = "2025.10.0.dev0"
license = "Apache-2.0"
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
description = "Open-source home automation platform running on Python 3."

@@ -47,7 +47,7 @@ dependencies = [
    "fnv-hash-fast==1.5.0",
    # hass-nabucasa is imported by helpers which don't depend on the cloud
    # integration
    "hass-nabucasa==1.1.2",
    "hass-nabucasa==1.1.1",
    # When bumping httpx, please check the version pins of
    # httpcore, anyio, and h11 in gen_requirements_all
    "httpx==0.28.1",
2
requirements.txt
generated
@@ -22,7 +22,7 @@ certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.6
fnv-hash-fast==1.5.0
hass-nabucasa==1.1.2
hass-nabucasa==1.1.1
httpx==0.28.1
home-assistant-bluetooth==1.13.1
ifaddr==0.2.0
23
requirements_all.txt
generated
@@ -131,7 +131,7 @@ TwitterAPI==2.7.12
WSDiscovery==2.1.2

# homeassistant.components.accuweather
accuweather==4.2.2
accuweather==4.2.1

# homeassistant.components.adax
adax==0.4.0

@@ -185,7 +185,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.1

# homeassistant.components.alexa_devices
aioamazondevices==6.2.6
aioamazondevices==6.0.0

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station

@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==41.10.0
aioesphomeapi==41.9.0

# homeassistant.components.flo
aioflo==2021.11.0

@@ -268,7 +268,7 @@ aioharmony==0.5.3
aiohasupervisor==0.3.3b0

# homeassistant.components.home_connect
aiohomeconnect==0.20.0
aiohomeconnect==0.19.0

# homeassistant.components.homekit_controller
aiohomekit==3.2.18

@@ -375,7 +375,7 @@ aioridwell==2025.09.0
aioruckus==0.42

# homeassistant.components.russound_rio
aiorussound==4.8.2
aiorussound==4.8.1

# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0

@@ -1145,7 +1145,7 @@ habiticalib==0.4.5
habluetooth==5.6.4

# homeassistant.components.cloud
hass-nabucasa==1.1.2
hass-nabucasa==1.1.1

# homeassistant.components.splunk
hass-splunk==0.1.1

@@ -1186,7 +1186,7 @@ hole==0.9.0
holidays==0.81

# homeassistant.components.frontend
home-assistant-frontend==20250925.1
home-assistant-frontend==20250924.0

# homeassistant.components.conversation
home-assistant-intents==2025.9.24

@@ -1364,7 +1364,7 @@ libpyfoscamcgi==0.0.7
libpyvivotek==0.4.0

# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.4.0
librehardwaremonitor-api==1.3.1

# homeassistant.components.mikrotik
librouteros==3.2.0

@@ -1499,9 +1499,6 @@ mutagen==1.47.0
# homeassistant.components.mutesync
mutesync==0.0.1

# homeassistant.components.mvglive
mvg==1.4.0

# homeassistant.components.permobil
mypermobil==0.1.8

@@ -2384,7 +2381,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.2

# homeassistant.components.smartthings
pysmartthings==3.3.0
pysmartthings==3.2.9

# homeassistant.components.smarty
pysmarty2==0.10.3

@@ -3235,7 +3232,7 @@ zeroconf==0.147.2
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.73
zha==0.0.72

# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.13
20
requirements_test_all.txt
generated
@@ -119,7 +119,7 @@ Tami4EdgeAPI==3.0
WSDiscovery==2.1.2

# homeassistant.components.accuweather
accuweather==4.2.2
accuweather==4.2.1

# homeassistant.components.adax
adax==0.4.0
@@ -173,7 +173,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.1

# homeassistant.components.alexa_devices
aioamazondevices==6.2.6
aioamazondevices==6.0.0

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==41.10.0
aioesphomeapi==41.9.0

# homeassistant.components.flo
aioflo==2021.11.0
@@ -253,7 +253,7 @@ aioharmony==0.5.3
aiohasupervisor==0.3.3b0

# homeassistant.components.home_connect
aiohomeconnect==0.20.0
aiohomeconnect==0.19.0

# homeassistant.components.homekit_controller
aiohomekit==3.2.18
@@ -357,7 +357,7 @@ aioridwell==2025.09.0
aioruckus==0.42

# homeassistant.components.russound_rio
aiorussound==4.8.2
aiorussound==4.8.1

# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -1006,7 +1006,7 @@ habiticalib==0.4.5
habluetooth==5.6.4

# homeassistant.components.cloud
hass-nabucasa==1.1.2
hass-nabucasa==1.1.1

# homeassistant.components.assist_satellite
# homeassistant.components.conversation
@@ -1035,7 +1035,7 @@ hole==0.9.0
holidays==0.81

# homeassistant.components.frontend
home-assistant-frontend==20250925.1
home-assistant-frontend==20250924.0

# homeassistant.components.conversation
home-assistant-intents==2025.9.24
@@ -1180,7 +1180,7 @@ letpot==0.6.2
libpyfoscamcgi==0.0.7

# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.4.0
librehardwaremonitor-api==1.3.1

# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -1987,7 +1987,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.2

# homeassistant.components.smartthings
pysmartthings==3.3.0
pysmartthings==3.2.9

# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -2682,7 +2682,7 @@ zeroconf==0.147.2
zeversolar==0.3.2

# homeassistant.components.zha
zha==0.0.73
zha==0.0.72

# homeassistant.components.zwave_js
zwave-js-server-python==0.67.1
@@ -18,13 +18,15 @@ TEST_DEVICE_1 = AmazonDevice(
online=True,
serial_number=TEST_DEVICE_1_SN,
software_version="echo_test_software_version",
do_not_disturb=False,
response_style=None,
bluetooth_state=True,
entity_id="11111111-2222-3333-4444-555555555555",
endpoint_id="G1234567890123456789012345678A",
appliance_id="G1234567890123456789012345678A",
sensors={
"dnd": AmazonDeviceSensor(name="dnd", value=False, error=False, scale=None),
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", error=False, scale="CELSIUS"
),
name="temperature", value="22.5", scale="CELSIUS"
)
},
)

@@ -40,11 +42,14 @@ TEST_DEVICE_2 = AmazonDevice(
online=True,
serial_number=TEST_DEVICE_2_SN,
software_version="echo_test_2_software_version",
do_not_disturb=False,
response_style=None,
bluetooth_state=True,
entity_id="11111111-2222-3333-4444-555555555555",
endpoint_id="G1234567890123456789012345678A",
appliance_id="G1234567890123456789012345678A",
sensors={
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", error=False, scale="CELSIUS"
name="temperature", value="22.5", scale="CELSIUS"
)
},
)
@@ -1,4 +1,52 @@
# serializer version: 1
# name: test_all_entities[binary_sensor.echo_test_bluetooth-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'binary_sensor.echo_test_bluetooth',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Bluetooth',
'platform': 'alexa_devices',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'bluetooth',
'unique_id': 'echo_test_serial_number-bluetooth',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[binary_sensor.echo_test_bluetooth-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Echo Test Bluetooth',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.echo_test_bluetooth',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_all_entities[binary_sensor.echo_test_connectivity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -2,6 +2,7 @@
# name: test_device_diagnostics
dict({
'account name': 'Echo Test',
'bluetooth state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -11,17 +12,9 @@
]),
'device family': 'mine',
'device type': 'echo',
'do not disturb': False,
'online': True,
'sensors': dict({
'dnd': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, scale=None)",
}),
'temperature': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, scale='CELSIUS')",
}),
}),
'response style': None,
'serial number': 'echo_test_serial_number',
'software version': 'echo_test_software_version',
})
@@ -32,6 +25,7 @@
'devices': list([
dict({
'account name': 'Echo Test',
'bluetooth state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -41,17 +35,9 @@
]),
'device family': 'mine',
'device type': 'echo',
'do not disturb': False,
'online': True,
'sensors': dict({
'dnd': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, scale=None)",
}),
'temperature': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, scale='CELSIUS')",
}),
}),
'response style': None,
'serial number': 'echo_test_serial_number',
'software version': 'echo_test_software_version',
}),
@@ -4,6 +4,8 @@
tuple(
dict({
'account_name': 'Echo Test',
'appliance_id': 'G1234567890123456789012345678A',
'bluetooth_state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -14,18 +16,12 @@
'device_family': 'mine',
'device_owner_customer_id': 'amazon_ower_id',
'device_type': 'echo',
'endpoint_id': 'G1234567890123456789012345678A',
'do_not_disturb': False,
'entity_id': '11111111-2222-3333-4444-555555555555',
'online': True,
'response_style': None,
'sensors': dict({
'dnd': dict({
'error': False,
'name': 'dnd',
'scale': None,
'value': False,
}),
'temperature': dict({
'error': False,
'name': 'temperature',
'scale': 'CELSIUS',
'value': '22.5',
@@ -45,6 +41,8 @@
tuple(
dict({
'account_name': 'Echo Test',
'appliance_id': 'G1234567890123456789012345678A',
'bluetooth_state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -55,18 +53,12 @@
'device_family': 'mine',
'device_owner_customer_id': 'amazon_ower_id',
'device_type': 'echo',
'endpoint_id': 'G1234567890123456789012345678A',
'do_not_disturb': False,
'entity_id': '11111111-2222-3333-4444-555555555555',
'online': True,
'response_style': None,
'sensors': dict({
'dnd': dict({
'error': False,
'name': 'dnd',
'scale': None,
'value': False,
}),
'temperature': dict({
'error': False,
'name': 'temperature',
'scale': 'CELSIUS',
'value': '22.5',
@@ -30,7 +30,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'do_not_disturb',
'unique_id': 'echo_test_serial_number-dnd',
'unique_id': 'echo_test_serial_number-do_not_disturb',
'unit_of_measurement': None,
})
# ---
@@ -123,8 +123,6 @@ async def test_dynamic_device(
assert (state := hass.states.get(entity_id_1))
assert state.state == STATE_ON

assert not hass.states.get(entity_id_2)

mock_amazon_devices_client.get_devices_data.return_value = {
TEST_DEVICE_1_SN: TEST_DEVICE_1,
TEST_DEVICE_2_SN: TEST_DEVICE_2,
@@ -134,38 +134,10 @@ async def test_unit_of_measurement(

mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].sensors = {
sensor: AmazonDeviceSensor(
name=sensor, value=api_value, error=False, scale=scale
)
}
].sensors = {sensor: AmazonDeviceSensor(name=sensor, value=api_value, scale=scale)}

await setup_integration(hass, mock_config_entry)

assert (state := hass.states.get(entity_id))
assert state.state == state_value
assert state.attributes["unit_of_measurement"] == unit


async def test_sensor_unavailable(
hass: HomeAssistant,
freezer: FrozenDateTimeFactory,
mock_amazon_devices_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test sensor is unavailable."""

entity_id = "sensor.echo_test_illuminance"

mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].sensors = {
"illuminance": AmazonDeviceSensor(
name="illuminance", value="800", error=True, scale=None
)
}

await setup_integration(hass, mock_config_entry)

assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
@@ -1,9 +1,7 @@
"""Tests for the Alexa Devices switch platform."""

from copy import deepcopy
from unittest.mock import AsyncMock, patch

from aioamazondevices.api import AmazonDeviceSensor
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -25,12 +23,10 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import setup_integration
from .conftest import TEST_DEVICE_1, TEST_DEVICE_1_SN
from .conftest import TEST_DEVICE_1_SN

from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform

ENTITY_ID = "switch.echo_test_do_not_disturb"


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_entities(
@@ -56,59 +52,48 @@ async def test_switch_dnd(
"""Test switching DND."""
await setup_integration(hass, mock_config_entry)

assert (state := hass.states.get(ENTITY_ID))
entity_id = "switch.echo_test_do_not_disturb"

assert (state := hass.states.get(entity_id))
assert state.state == STATE_OFF

await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: ENTITY_ID},
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)

assert mock_amazon_devices_client.set_do_not_disturb.call_count == 1

device_data = deepcopy(TEST_DEVICE_1)
device_data.sensors = {
"dnd": AmazonDeviceSensor(name="dnd", value=True, error=False, scale=None),
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", error=False, scale="CELSIUS"
),
}
mock_amazon_devices_client.get_devices_data.return_value = {
TEST_DEVICE_1_SN: device_data
}
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].do_not_disturb = True

freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()

assert (state := hass.states.get(ENTITY_ID))
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON

await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: ENTITY_ID},
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)

device_data.sensors = {
"dnd": AmazonDeviceSensor(name="dnd", value=False, error=False, scale=None),
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", error=False, scale="CELSIUS"
),
}
mock_amazon_devices_client.get_devices_data.return_value = {
TEST_DEVICE_1_SN: device_data
}
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].do_not_disturb = False

freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()

assert mock_amazon_devices_client.set_do_not_disturb.call_count == 2
assert (state := hass.states.get(ENTITY_ID))
assert (state := hass.states.get(entity_id))
assert state.state == STATE_OFF


@@ -119,13 +104,16 @@ async def test_offline_device(
mock_config_entry: MockConfigEntry,
) -> None:
"""Test offline device handling."""

entity_id = "switch.echo_test_do_not_disturb"

mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].online = False

await setup_integration(hass, mock_config_entry)

assert (state := hass.states.get(ENTITY_ID))
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE

mock_amazon_devices_client.get_devices_data.return_value[
@@ -136,5 +124,5 @@ async def test_offline_device(
async_fire_time_changed(hass)
await hass.async_block_till_done()

assert (state := hass.states.get(ENTITY_ID))
assert (state := hass.states.get(entity_id))
assert state.state != STATE_UNAVAILABLE
@@ -10,10 +10,8 @@ from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TUR
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er

from . import setup_integration
from .const import TEST_DEVICE_1_SN

from tests.common import MockConfigEntry

@@ -56,41 +54,3 @@ async def test_alexa_api_call_exceptions(
assert exc_info.value.translation_domain == DOMAIN
assert exc_info.value.translation_key == key
assert exc_info.value.translation_placeholders == {"error": error}


async def test_alexa_unique_id_migration(
hass: HomeAssistant,
mock_amazon_devices_client: AsyncMock,
mock_config_entry: MockConfigEntry,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test unique_id migration."""

mock_config_entry.add_to_hass(hass)

device = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
identifiers={(DOMAIN, mock_config_entry.entry_id)},
name=mock_config_entry.title,
manufacturer="Amazon",
model="Echo Dot",
entry_type=dr.DeviceEntryType.SERVICE,
)

entity = entity_registry.async_get_or_create(
SWITCH_DOMAIN,
DOMAIN,
unique_id=f"{TEST_DEVICE_1_SN}-do_not_disturb",
device_id=device.id,
config_entry=mock_config_entry,
has_entity_name=True,
)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

migrated_entity = entity_registry.async_get(entity.entity_id)
assert migrated_entity is not None
assert migrated_entity.config_entry_id == mock_config_entry.entry_id
assert migrated_entity.unique_id == f"{TEST_DEVICE_1_SN}-dnd"
@@ -20,14 +20,13 @@ from aiocomelit.const import (

BRIDGE_HOST = "fake_bridge_host"
BRIDGE_PORT = 80
BRIDGE_PIN = "1234"
BRIDGE_PIN = 1234

VEDO_HOST = "fake_vedo_host"
VEDO_PORT = 8080
VEDO_PIN = "5678"
VEDO_PIN = 5678

FAKE_PIN = "0000"
BAD_PIN = "abcd"
FAKE_PIN = 0000

LIGHT0 = ComelitSerialBridgeObject(
index=0,
@@ -10,10 +10,9 @@ from homeassistant.components.comelit.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType, InvalidData
from homeassistant.data_entry_flow import FlowResultType

from .const import (
BAD_PIN,
BRIDGE_HOST,
BRIDGE_PIN,
BRIDGE_PORT,
@@ -311,46 +310,3 @@ async def test_reconfigure_fails(
CONF_PIN: BRIDGE_PIN,
CONF_TYPE: BRIDGE,
}


async def test_pin_format_serial_bridge(
hass: HomeAssistant,
mock_serial_bridge: AsyncMock,
mock_serial_bridge_config_entry: MockConfigEntry,
) -> None:
"""Test PIN is valid format."""

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"

with pytest.raises(InvalidData):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: BRIDGE_HOST,
CONF_PORT: BRIDGE_PORT,
CONF_PIN: BAD_PIN,
},
)
assert result["type"] is FlowResultType.FORM

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: BRIDGE_HOST,
CONF_PORT: BRIDGE_PORT,
CONF_PIN: BRIDGE_PIN,
},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["data"] == {
CONF_HOST: BRIDGE_HOST,
CONF_PORT: BRIDGE_PORT,
CONF_PIN: BRIDGE_PIN,
CONF_TYPE: BRIDGE,
}
assert not result["result"].unique_id
await hass.async_block_till_done()
@@ -1184,42 +1184,6 @@ async def test_reauth_attempt_to_change_mac_aborts(
}


@pytest.mark.usefixtures("mock_zeroconf", "mock_setup_entry")
async def test_reauth_password_changed(
hass: HomeAssistant, mock_client: APIClient
) -> None:
"""Test reauth when password has changed."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: "127.0.0.1", CONF_PORT: 6053, CONF_PASSWORD: "old_password"},
unique_id="11:22:33:44:55:aa",
)
entry.add_to_hass(hass)

mock_client.connect.side_effect = InvalidAuthAPIError("Invalid password")

result = await entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "authenticate"
assert result["description_placeholders"] == {
"name": "Mock Title",
}

mock_client.connect.side_effect = None
mock_client.connect.return_value = None
mock_client.device_info.return_value = DeviceInfo(
uses_password=True, name="test", mac_address="11:22:33:44:55:aa"
)

result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_PASSWORD: "new_password"}
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert entry.data[CONF_PASSWORD] == "new_password"


@pytest.mark.usefixtures("mock_setup_entry", "mock_zeroconf")
async def test_reauth_fixed_via_dashboard(
hass: HomeAssistant,
@@ -1275,7 +1239,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password(
) -> None:
"""Test reauth fixed automatically via dashboard with password removed."""
mock_client.device_info.side_effect = (
InvalidEncryptionKeyAPIError("Wrong key", "test"),
InvalidAuthAPIError,
DeviceInfo(uses_password=False, name="test", mac_address="11:22:33:44:55:aa"),
)
@@ -3,7 +3,7 @@
from typing import Any
from unittest.mock import patch

from aioesphomeapi import APIClient, DeviceInfo, InvalidEncryptionKeyAPIError
from aioesphomeapi import APIClient, DeviceInfo, InvalidAuthAPIError
import pytest

from homeassistant.components.esphome import CONF_NOISE_PSK, DOMAIN, dashboard
@@ -194,7 +194,7 @@ async def test_new_dashboard_fix_reauth(
) -> None:
"""Test config entries waiting for reauth are triggered."""
mock_client.device_info.side_effect = (
InvalidEncryptionKeyAPIError("Wrong key", "test"),
InvalidAuthAPIError,
DeviceInfo(uses_password=False, name="test", mac_address="11:22:33:44:55:AA"),
)
@@ -1455,37 +1455,6 @@ async def test_no_reauth_wrong_mac(
)


async def test_auth_error_during_on_connect_triggers_reauth(
hass: HomeAssistant,
mock_client: APIClient,
) -> None:
"""Test that InvalidAuthAPIError during on_connect triggers reauth."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="11:22:33:44:55:aa",
data={
CONF_HOST: "test.local",
CONF_PORT: 6053,
CONF_PASSWORD: "wrong_password",
},
)
entry.add_to_hass(hass)

mock_client.device_info_and_list_entities = AsyncMock(
side_effect=InvalidAuthAPIError("Invalid password!")
)

await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
await hass.async_block_till_done()

flows = hass.config_entries.flow.async_progress(DOMAIN)
assert len(flows) == 1
assert flows[0]["context"]["source"] == "reauth"
assert flows[0]["context"]["entry_id"] == entry.entry_id
assert mock_client.disconnect.call_count >= 1


async def test_entry_missing_unique_id(
hass: HomeAssistant,
mock_client: APIClient,
@@ -282,6 +282,65 @@ async def test_subscribe_topic(
unsub()


async def test_subscribe_topic_and_wait(
hass: HomeAssistant,
mock_debouncer: asyncio.Event,
setup_with_birth_msg_client_mock: MqttMockPahoClient,
recorded_calls: list[ReceiveMessage],
record_calls: MessageCallbackType,
) -> None:
"""Test the subscription of a topic."""
await mock_debouncer.wait()
mock_debouncer.clear()
unsub_no_wait = await mqtt.async_subscribe(hass, "other-test-topic/#", record_calls)
unsub_wait = await mqtt.async_subscribe(hass, "test-topic", record_calls, wait=True)

async_fire_mqtt_message(hass, "test-topic", "test-payload")
async_fire_mqtt_message(hass, "other-test-topic/test", "other-test-payload")

await hass.async_block_till_done()
assert len(recorded_calls) == 2
assert recorded_calls[0].topic == "test-topic"
assert recorded_calls[0].payload == "test-payload"
assert recorded_calls[1].topic == "other-test-topic/test"
assert recorded_calls[1].payload == "other-test-payload"

unsub_no_wait()
unsub_wait()

async_fire_mqtt_message(hass, "test-topic", "test-payload")

await hass.async_block_till_done()
assert len(recorded_calls) == 2

# Cannot unsubscribe twice
with pytest.raises(HomeAssistantError):
unsub_no_wait()

with pytest.raises(HomeAssistantError):
unsub_wait()


async def test_subscribe_topic_and_wait_timeout(
hass: HomeAssistant,
mock_debouncer: asyncio.Event,
setup_with_birth_msg_client_mock: MqttMockPahoClient,
recorded_calls: list[ReceiveMessage],
record_calls: MessageCallbackType,
) -> None:
"""Test the subscription of a topic."""
await mock_debouncer.wait()
mock_debouncer.clear()
with (
patch("homeassistant.components.mqtt.client.SUBSCRIBE_TIMEOUT", 0),
pytest.raises(HomeAssistantError) as exc,
):
await mqtt.async_subscribe(hass, "test-topic", record_calls, wait=True)

assert exc.value.translation_domain == mqtt.DOMAIN
assert exc.value.translation_key == "subscribe_timeout"


@pytest.mark.usefixtures("mqtt_mock_entry")
async def test_subscribe_topic_not_initialize(
hass: HomeAssistant, record_calls: MessageCallbackType
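The two tests above pin down the contract of the new `wait` flag on `mqtt.async_subscribe`: with `wait=True` the call returns only after the broker has confirmed the subscription, it raises a translated `HomeAssistantError` (translation key `subscribe_timeout`) if confirmation does not arrive within `SUBSCRIBE_TIMEOUT`, and the returned unsubscribe callback raises if invoked twice. A minimal usage sketch under those assumptions; the topic, callback, and function names here are illustrative, not taken from this diff:

```python
import logging

from homeassistant.components import mqtt
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

_LOGGER = logging.getLogger(__name__)


async def example_subscribe(hass: HomeAssistant) -> None:
    """Sketch: subscribe and continue only after the broker acknowledges."""

    def message_received(msg: mqtt.ReceiveMessage) -> None:
        # Runs for every publish matching the topic.
        _LOGGER.debug("%s: %s", msg.topic, msg.payload)

    try:
        # wait=True returns only once the broker acknowledges the
        # subscription; on timeout a HomeAssistantError with translation
        # key "subscribe_timeout" is raised (per the tests above).
        unsub = await mqtt.async_subscribe(
            hass, "example/topic", message_received, wait=True
        )
    except HomeAssistantError:
        _LOGGER.warning("Broker did not confirm the subscription in time")
        return

    # The returned callable unsubscribes; calling it a second time raises.
    unsub()
```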
@@ -30,7 +30,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_focused_einstein_status',
'unique_id': 'portainer_test_entry_123_dd19facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unit_of_measurement': None,
})
# ---
@@ -79,7 +79,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_funny_chatelet_status',
'unique_id': 'portainer_test_entry_123_aa86eacfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unit_of_measurement': None,
})
# ---
@@ -177,7 +177,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_practical_morse_status',
'unique_id': 'portainer_test_entry_123_ee20facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unit_of_measurement': None,
})
# ---
@@ -226,7 +226,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_serene_banach_status',
'unique_id': 'portainer_test_entry_123_bb97facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unit_of_measurement': None,
})
# ---
@@ -275,7 +275,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_stoic_turing_status',
'unique_id': 'portainer_test_entry_123_cc08facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unit_of_measurement': None,
})
# ---
@@ -5,7 +5,6 @@ from datetime import timedelta
from unittest.mock import patch

import pytest
from roborock import MultiMapsList
from roborock.exceptions import RoborockException
from vacuum_map_parser_base.config.color import SupportedColor

@@ -136,30 +135,3 @@ async def test_dynamic_local_scan_interval(
async_fire_time_changed(hass, dt_util.utcnow() + interval)

assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "20"


async def test_no_maps(
hass: HomeAssistant,
mock_roborock_entry: MockConfigEntry,
bypass_api_fixture: None,
) -> None:
"""Test that a device with no maps is handled correctly."""
prop = copy.deepcopy(PROP)
prop.status.map_status = 252
with (
patch(
"homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop",
return_value=prop,
),
patch(
"homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_multi_maps_list",
return_value=MultiMapsList(
max_multi_map=1, max_bak_map=1, multi_map_count=0, map_info=[]
),
),
patch(
"homeassistant.components.roborock.RoborockMqttClientV1.load_multi_map"
) as load_map,
):
await hass.config_entries.async_setup(mock_roborock_entry.entry_id)
assert load_map.call_count == 0
@@ -83,10 +83,7 @@ async def test_if_action_before_sunrise_no_offset(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNRISE},
},
"condition": {"condition": "sun", "before": SUN_EVENT_SUNRISE},
"action": {"service": "test.automation"},
}
},
@@ -159,10 +156,7 @@ async def test_if_action_after_sunrise_no_offset(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNRISE},
},
"condition": {"condition": "sun", "after": SUN_EVENT_SUNRISE},
"action": {"service": "test.automation"},
}
},
@@ -237,10 +231,8 @@ async def test_if_action_before_sunrise_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {
"before": SUN_EVENT_SUNRISE,
"before_offset": "+1:00:00",
},
"before": SUN_EVENT_SUNRISE,
"before_offset": "+1:00:00",
},
"action": {"service": "test.automation"},
}
@@ -364,7 +356,8 @@ async def test_if_action_before_sunset_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"before": "sunset", "before_offset": "+1:00:00"},
"before": "sunset",
"before_offset": "+1:00:00",
},
"action": {"service": "test.automation"},
}
@@ -488,7 +481,8 @@ async def test_if_action_after_sunrise_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNRISE, "after_offset": "+1:00:00"},
"after": SUN_EVENT_SUNRISE,
"after_offset": "+1:00:00",
},
"action": {"service": "test.automation"},
}
@@ -636,7 +630,8 @@ async def test_if_action_after_sunset_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"after": "sunset", "after_offset": "+1:00:00"},
"after": "sunset",
"after_offset": "+1:00:00",
},
"action": {"service": "test.automation"},
}
@@ -712,7 +707,8 @@ async def test_if_action_after_and_before_during(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNRISE, "before": SUN_EVENT_SUNSET},
"after": SUN_EVENT_SUNRISE,
"before": SUN_EVENT_SUNSET,
},
"action": {"service": "test.automation"},
}
@@ -816,7 +812,8 @@ async def test_if_action_before_or_after_during(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNRISE, "after": SUN_EVENT_SUNSET},
"before": SUN_EVENT_SUNRISE,
"after": SUN_EVENT_SUNSET,
},
"action": {"service": "test.automation"},
}
@@ -944,10 +941,7 @@ async def test_if_action_before_sunrise_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNRISE},
},
"condition": {"condition": "sun", "before": SUN_EVENT_SUNRISE},
"action": {"service": "test.automation"},
}
},
@@ -1026,10 +1020,7 @@ async def test_if_action_after_sunrise_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNRISE},
},
"condition": {"condition": "sun", "after": SUN_EVENT_SUNRISE},
"action": {"service": "test.automation"},
}
},
@@ -1108,10 +1099,7 @@ async def test_if_action_before_sunset_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNSET},
},
"condition": {"condition": "sun", "before": SUN_EVENT_SUNSET},
"action": {"service": "test.automation"},
}
},
@@ -1190,10 +1178,7 @@ async def test_if_action_after_sunset_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNSET},
},
"condition": {"condition": "sun", "after": SUN_EVENT_SUNSET},
"action": {"service": "test.automation"},
}
},
@@ -62,15 +62,9 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
"""Test function with actual service call events in database."""
user_id = str(uuid.uuid4())

hass.states.async_set("light.living_room", "off")
hass.states.async_set("light.kitchen", "off")
hass.states.async_set("climate.thermostat", "off")
hass.states.async_set("light.bedroom", "off")
hass.states.async_set("lock.front_door", "locked")

# Create service call events at different times of day
# Morning events - use separate service calls to get around context deduplication
with freeze_time("2023-07-01 07:00:00"): # Morning
with freeze_time("2023-07-01 07:00:00+00:00"): # Morning
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -83,7 +77,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await hass.async_block_till_done()

# Afternoon events
with freeze_time("2023-07-01 14:00:00"): # Afternoon
with freeze_time("2023-07-01 14:00:00+00:00"): # Afternoon
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -96,7 +90,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await hass.async_block_till_done()

# Evening events
with freeze_time("2023-07-01 19:00:00"): # Evening
with freeze_time("2023-07-01 19:00:00+00:00"): # Evening
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -109,7 +103,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await hass.async_block_till_done()

# Night events
with freeze_time("2023-07-01 23:00:00"): # Night
with freeze_time("2023-07-01 23:00:00+00:00"): # Night
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -125,7 +119,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)

# Get predictions - make sure we're still in a reasonable timeframe
with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
results = await async_predict_common_control(hass, user_id)

# Verify results contain the expected entities in the correct time periods
@@ -157,12 +151,7 @@ async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None:
suggested_object_id="kitchen",
)

hass.states.async_set("light.living_room", "off")
hass.states.async_set("light.kitchen", "off")
hass.states.async_set("light.hallway", "off")
hass.states.async_set("not_allowed.domain", "off")

with freeze_time("2023-07-01 10:00:00"): # Morning
with freeze_time("2023-07-01 10:00:00+00:00"): # Morning
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -174,7 +163,6 @@ async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None:
"light.kitchen",
"light.hallway",
"not_allowed.domain",
"light.not_in_state_machine",
]
},
},
@@ -184,7 +172,7 @@ async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None:

await async_wait_recording_done(hass)

with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
results = await async_predict_common_control(hass, user_id)

# Two lights should be counted (10:00 UTC = 02:00 local = night)
@@ -201,10 +189,7 @@ async def test_context_deduplication(hass: HomeAssistant) -> None:
user_id = str(uuid.uuid4())
context = Context(user_id=user_id)

hass.states.async_set("light.living_room", "off")
hass.states.async_set("switch.coffee_maker", "off")

with freeze_time("2023-07-01 10:00:00"): # Morning
with freeze_time("2023-07-01 10:00:00+00:00"): # Morning
# Fire multiple events with the same context
hass.bus.async_fire(
EVENT_CALL_SERVICE,
@@ -230,7 +215,7 @@ async def test_context_deduplication(hass: HomeAssistant) -> None:

await async_wait_recording_done(hass)

with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
results = await async_predict_common_control(hass, user_id)

# Only the first event should be processed (10:00 UTC = 02:00 local = night)
@@ -247,11 +232,8 @@ async def test_old_events_excluded(hass: HomeAssistant) -> None:
"""Test that events older than 30 days are excluded."""
user_id = str(uuid.uuid4())

hass.states.async_set("light.old_event", "off")
hass.states.async_set("light.recent_event", "off")

# Create an old event (35 days ago)
with freeze_time("2023-05-27 10:00:00"): # 35 days before July 1st
with freeze_time("2023-05-27 10:00:00+00:00"): # 35 days before July 1st
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -264,7 +246,7 @@ async def test_old_events_excluded(hass: HomeAssistant) -> None:
await hass.async_block_till_done()

# Create a recent event (5 days ago)
with freeze_time("2023-06-26 10:00:00"): # 5 days before July 1st
with freeze_time("2023-06-26 10:00:00+00:00"): # 5 days before July 1st
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -279,7 +261,7 @@ async def test_old_events_excluded(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)

# Query with current time
with freeze_time("2023-07-01 10:00:00"):
with freeze_time("2023-07-01 10:00:00+00:00"):
results = await async_predict_common_control(hass, user_id)

# Only recent event should be included (10:00 UTC = 02:00 local = night)
@@ -296,16 +278,8 @@ async def test_entities_limit(hass: HomeAssistant) -> None:
"""Test that only top entities are returned per time category."""
user_id = str(uuid.uuid4())

hass.states.async_set("light.most_used", "off")
hass.states.async_set("light.second", "off")
hass.states.async_set("light.third", "off")
hass.states.async_set("light.fourth", "off")
hass.states.async_set("light.fifth", "off")
hass.states.async_set("light.sixth", "off")
hass.states.async_set("light.seventh", "off")

# Create more than 5 different entities in morning
with freeze_time("2023-07-01 08:00:00"):
with freeze_time("2023-07-01 08:00:00+00:00"):
# Create entities with different frequencies
entities_with_counts = [
("light.most_used", 10),
@@ -334,7 +308,7 @@ async def test_entities_limit(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)

with (
freeze_time("2023-07-02 10:00:00"),
freeze_time("2023-07-02 10:00:00+00:00"),
patch(
"homeassistant.components.usage_prediction.common_control.RESULTS_TO_INCLUDE",
5,
@@ -361,10 +335,7 @@ async def test_different_users_separated(hass: HomeAssistant) -> None:
user_id_1 = str(uuid.uuid4())
user_id_2 = str(uuid.uuid4())

hass.states.async_set("light.user1_light", "off")
hass.states.async_set("light.user2_light", "off")

with freeze_time("2023-07-01 10:00:00"):
with freeze_time("2023-07-01 10:00:00+00:00"):
# User 1 events
hass.bus.async_fire(
EVENT_CALL_SERVICE,
@@ -392,7 +363,7 @@ async def test_different_users_separated(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)

# Get results for each user
with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
results_user1 = await async_predict_common_control(hass, user_id_1)
results_user2 = await async_predict_common_control(hass, user_id_2)
@@ -708,8 +708,8 @@ async def test_multiple_zha_entries_aborts(hass: HomeAssistant, mock_app) -> Non


@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
async def test_discovery_via_usb_duplicate_unique_id(hass: HomeAssistant) -> None:
"""Test USB discovery when a config entry with a duplicate unique_id already exists."""
async def test_discovery_via_usb_path_does_not_change(hass: HomeAssistant) -> None:
"""Test usb flow already set up and the path does not change."""

entry = MockConfigEntry(
domain=DOMAIN,
@@ -737,8 +737,13 @@ async def test_discovery_via_usb_duplicate_unique_id(hass: HomeAssistant) -> Non
)
await hass.async_block_till_done()

assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "confirm"
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert entry.data[CONF_DEVICE] == {
CONF_DEVICE_PATH: "/dev/ttyUSB1",
CONF_BAUDRATE: 115200,
CONF_FLOW_CONTROL: None,
}


@patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True))
@@ -852,40 +857,6 @@ async def test_discovery_via_usb_zha_ignored_updates(hass: HomeAssistant) -> Non
}


async def test_discovery_via_usb_same_device_already_setup(hass: HomeAssistant) -> None:
"""Test discovery aborting if ZHA is already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_DEVICE: {CONF_DEVICE_PATH: "/dev/serial/by-id/usb-device123"}},
).add_to_hass(hass)

# Discovery info with the same device but different path format
discovery_info = UsbServiceInfo(
device="/dev/ttyUSB0",
pid="AAAA",
vid="AAAA",
serial_number="1234",
description="zigbee radio",
manufacturer="test",
)

with patch(
"homeassistant.components.zha.config_flow.usb.get_serial_by_id",
return_value="/dev/serial/by-id/usb-device123",
) as mock_get_serial_by_id:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USB}, data=discovery_info
)
await hass.async_block_till_done()

# Verify get_serial_by_id was called to normalize the path
assert mock_get_serial_by_id.mock_calls == [call("/dev/ttyUSB0")]

# Should abort since it's the same device
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"


@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
@patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True))
async def test_legacy_zeroconf_discovery_already_setup(hass: HomeAssistant) -> None:
@@ -919,39 +890,6 @@ async def test_legacy_zeroconf_discovery_already_setup(hass: HomeAssistant) -> N
assert confirm_result["step_id"] == "choose_migration_strategy"


async def test_zeroconf_discovery_via_socket_already_setup_with_ip_match(
hass: HomeAssistant,
) -> None:
"""Test zeroconf discovery aborting when ZHA is already setup with socket and one IP matches."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_DEVICE: {CONF_DEVICE_PATH: "socket://192.168.1.101:6638"}},
).add_to_hass(hass)

service_info = ZeroconfServiceInfo(
ip_address=ip_address("192.168.1.100"),
ip_addresses=[
ip_address("192.168.1.100"),
ip_address("192.168.1.101"), # Matches config entry
],
hostname="tube-zigbee-gw.local.",
name="mock_name",
port=6638,
properties={"name": "tube_123456"},
type="mock_type",
)

# Discovery should abort due to single instance check
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info
)
await hass.async_block_till_done()

# Should abort since one of the advertised IPs matches existing socket path
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"


@patch(
"homeassistant.components.zha.radio_manager.ZhaRadioManager.detect_radio_type",
mock_detect_radio_type(radio_type=RadioType.deconz),
@@ -2351,28 +2289,34 @@ async def test_config_flow_serial_resolution_oserror(
) -> None:
"""Test that OSError during serial port resolution is handled."""

discovery_info = UsbServiceInfo(
device="/dev/ttyZIGBEE",
pid="AAAA",
vid="AAAA",
serial_number="1234",
description="zigbee radio",
manufacturer="test",
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "manual_pick_radio_type"},
data={CONF_RADIO_TYPE: RadioType.ezsp.description},
)

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB33"},
)

assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "choose_setup_strategy"

with (
patch(
"homeassistant.components.zha.config_flow.usb.get_serial_by_id",
"homeassistant.components.usb.get_serial_by_id",
side_effect=OSError("Test error"),
),
):
result_init = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USB}, data=discovery_info
setup_result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={"next_step_id": config_flow.SETUP_STRATEGY_RECOMMENDED},
)

assert result_init["type"] is FlowResultType.ABORT
assert result_init["reason"] == "cannot_resolve_path"
assert result_init["description_placeholders"] == {"path": "/dev/ttyZIGBEE"}
assert setup_result["type"] is FlowResultType.ABORT
assert setup_result["reason"] == "cannot_resolve_path"
assert setup_result["description_placeholders"] == {"path": "/dev/ttyUSB33"}


@patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee")
@@ -12,7 +12,8 @@ async def test_zone_raises(hass: HomeAssistant) -> None:
"""Test that zone raises ConditionError on errors."""
config = {
"condition": "zone",
"options": {"entity_id": "device_tracker.cat", "zone": "zone.home"},
"entity_id": "device_tracker.cat",
"zone": "zone.home",
}
config = cv.CONDITION_SCHEMA(config)
config = await condition.async_validate_condition_config(hass, config)
@@ -65,10 +66,8 @@ async def test_zone_raises(hass: HomeAssistant) -> None:

config = {
"condition": "zone",
"options": {
"entity_id": ["device_tracker.cat", "device_tracker.dog"],
"zone": ["zone.home", "zone.work"],
},
"entity_id": ["device_tracker.cat", "device_tracker.dog"],
"zone": ["zone.home", "zone.work"],
}
config = cv.CONDITION_SCHEMA(config)
config = await condition.async_validate_condition_config(hass, config)
@@ -103,10 +102,8 @@ async def test_zone_multiple_entities(hass: HomeAssistant) -> None:
{
"alias": "Zone Condition",
"condition": "zone",
"options": {
"entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
"zone": "zone.home",
},
"entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
"zone": "zone.home",
},
],
}
@@ -164,10 +161,8 @@ async def test_multiple_zones(hass: HomeAssistant) -> None:
"conditions": [
{
"condition": "zone",
"options": {
"entity_id": "device_tracker.person",
"zone": ["zone.home", "zone.work"],
},
"entity_id": "device_tracker.person",
"zone": ["zone.home", "zone.work"],
},
],
}
@@ -1,12 +1,10 @@
"""Test automation helpers."""

import pytest
import voluptuous as vol

from homeassistant.helpers.automation import (
get_absolute_description_key,
get_relative_description_key,
move_top_level_schema_fields_to_options,
)


@@ -36,73 +34,3 @@ def test_relative_description_key(relative_key: str, absolute_key: str) -> None:
"""Test relative description key."""
DOMAIN = "homeassistant"
assert get_relative_description_key(DOMAIN, absolute_key) == relative_key


@pytest.mark.parametrize(
("config", "schema_dict", "expected_config"),
[
(
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
},
{},
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
"options": {},
},
),
(
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
},
{
vol.Required("entity"): str,
vol.Optional("from"): str,
vol.Optional("to"): str,
vol.Optional("for"): dict,
vol.Optional("attribute"): str,
vol.Optional("value_template"): str,
},
{
"platform": "test",
"extra_field": "extra_value",
"options": {
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
},
},
),
],
)
async def test_move_schema_fields_to_options(
config, schema_dict, expected_config
) -> None:
"""Test moving schema fields to options."""
assert (
move_top_level_schema_fields_to_options(config, schema_dict) == expected_config
)
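The parametrized cases above fully determine the behavior being tested for `move_top_level_schema_fields_to_options`: keys that appear in the schema dict are relocated under an `options` key, all other top-level keys stay in place, and an empty schema yields an empty `options` dict. A small self-contained sketch of that behavior, with values taken from the test data above and the import path as used in this test file:

```python
import voluptuous as vol

from homeassistant.helpers.automation import move_top_level_schema_fields_to_options

config = {
    "platform": "test",
    "entity": "sensor.test",
    "extra_field": "extra_value",
}
# Only keys named in the schema dict are moved under "options".
schema_dict = {vol.Required("entity"): str}

assert move_top_level_schema_fields_to_options(config, schema_dict) == {
    "platform": "test",
    "extra_field": "extra_value",
    "options": {"entity": "sensor.test"},
}
```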
|
@@ -32,13 +32,6 @@ from homeassistant.helpers import (
|
||||
entity_registry as er,
|
||||
trace,
|
||||
)
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
async_validate_condition_config,
|
||||
)
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import Integration, async_get_integration
|
||||
@@ -2112,9 +2105,12 @@ async def test_platform_async_get_conditions(hass: HomeAssistant) -> None:
|
||||
async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
|
||||
"""Test a condition platform with multiple conditions."""
|
||||
|
||||
class MockCondition(Condition):
|
||||
class MockCondition(condition.Condition):
|
||||
"""Mock condition."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize condition."""
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
@@ -2122,24 +2118,23 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
|
||||
"""Validate config."""
|
||||
return config
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
|
||||
class MockCondition1(MockCondition):
|
||||
"""Mock condition 1."""
|
||||
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
async def async_get_checker(self) -> condition.ConditionCheckerType:
|
||||
"""Evaluate state based on configuration."""
|
||||
return lambda hass, vars: True
|
||||
|
||||
class MockCondition2(MockCondition):
|
||||
"""Mock condition 2."""
|
||||
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
async def async_get_checker(self) -> condition.ConditionCheckerType:
|
||||
"""Evaluate state based on configuration."""
|
||||
return lambda hass, vars: False
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
async def async_get_conditions(
|
||||
hass: HomeAssistant,
|
||||
) -> dict[str, type[condition.Condition]]:
|
||||
return {
|
||||
"_": MockCondition1,
|
||||
"cond_2": MockCondition2,
|
||||
@@ -2153,12 +2148,12 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
|
||||
config_1 = {CONF_CONDITION: "test"}
|
||||
config_2 = {CONF_CONDITION: "test.cond_2"}
|
||||
config_3 = {CONF_CONDITION: "test.unknown_cond"}
|
||||
assert await async_validate_condition_config(hass, config_1) == config_1
|
||||
assert await async_validate_condition_config(hass, config_2) == config_2
|
||||
assert await condition.async_validate_condition_config(hass, config_1) == config_1
|
||||
assert await condition.async_validate_condition_config(hass, config_2) == config_2
|
||||
with pytest.raises(
|
||||
vol.Invalid, match="Invalid condition 'test.unknown_cond' specified"
|
||||
):
|
||||
await async_validate_condition_config(hass, config_3)
|
||||
await condition.async_validate_condition_config(hass, config_3)
|
||||
|
||||
cond_func = await condition.async_from_config(hass, config_1)
|
||||
assert cond_func(hass, {}) is True
|
||||
@@ -2170,74 +2165,6 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
|
||||
await condition.async_from_config(hass, config_3)
|
||||
|
||||
|
||||
async def test_platform_migrate_trigger(hass: HomeAssistant) -> None:
    """Test a condition platform with a migration."""

    OPTIONS_SCHEMA_DICT = {
        vol.Required("option_1"): str,
        vol.Optional("option_2"): int,
    }

    class MockCondition(Condition):
        """Mock condition."""

        @classmethod
        async def async_validate_complete_config(
            cls, hass: HomeAssistant, complete_config: ConfigType
        ) -> ConfigType:
            """Validate complete config."""
            complete_config = move_top_level_schema_fields_to_options(
                complete_config, OPTIONS_SCHEMA_DICT
            )
            return await super().async_validate_complete_config(hass, complete_config)

        @classmethod
        async def async_validate_config(
            cls, hass: HomeAssistant, config: ConfigType
        ) -> ConfigType:
            """Validate config."""
            return config

    async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
        return {
            "_": MockCondition,
        }

    mock_integration(hass, MockModule("test"))
    mock_platform(
        hass, "test.condition", Mock(async_get_conditions=async_get_conditions)
    )

    config_1 = {
        "condition": "test",
        "option_1": "value_1",
        "option_2": 2,
    }
    config_2 = {
        "condition": "test",
        "option_1": "value_1",
    }
    config_1_migrated = {
        "condition": "test",
        "options": {"option_1": "value_1", "option_2": 2},
    }
    config_2_migrated = {
        "condition": "test",
        "options": {"option_1": "value_1"},
    }

    assert await async_validate_condition_config(hass, config_1) == config_1_migrated
    assert await async_validate_condition_config(hass, config_2) == config_2_migrated
    assert (
        await async_validate_condition_config(hass, config_1_migrated)
        == config_1_migrated
    )
    assert (
        await async_validate_condition_config(hass, config_2_migrated)
        == config_2_migrated
    )

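The migrated configs above pin down what move_top_level_schema_fields_to_options does: every top-level key named in the schema dict moves under "options", everything else stays put, and re-running the helper on an already-migrated config is a no-op (hence the last two asserts). A minimal sketch consistent with those expectations, assuming merge-into-existing-"options" semantics; it is not the shipped implementation:

import voluptuous as vol

def _move_fields_to_options(config: dict, schema_dict: dict) -> dict:
    """Hypothetical re-implementation matching only the asserts in this file."""
    # vol.Required("x") / vol.Optional("x") markers stringify back to "x",
    # so str(marker) recovers the plain field name to match against.
    fields = {str(marker) for marker in schema_dict}
    moved = {k: v for k, v in config.items() if k in fields}
    rest = {k: v for k, v in config.items() if k not in fields and k != "options"}
    # Merging into any existing "options" is what makes a second pass a no-op.
    return {**rest, "options": {**config.get("options", {}), **moved}}

assert _move_fields_to_options(
    {"condition": "test", "option_1": "value_1", "option_2": 2},
    {vol.Required("option_1"): str, vol.Optional("option_2"): int},
) == {"condition": "test", "options": {"option_1": "value_1", "option_2": 2}}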
@pytest.mark.parametrize("enabled_value", [True, "{{ 1 == 1 }}"])
async def test_enabled_condition(
    hass: HomeAssistant, enabled_value: bool | str

[diff truncated by extraction; the hunks below patch the trigger helper tests]
@@ -19,7 +19,6 @@ from homeassistant.core import (
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import trigger
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.trigger import (
    DATA_PLUGGABLE_ACTIONS,
    PluggableAction,
@@ -30,6 +29,7 @@ from homeassistant.helpers.trigger import (
    _async_get_trigger_platform,
    async_initialize_triggers,
    async_validate_trigger_config,
    move_top_level_schema_fields_to_options,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import Integration, async_get_integration
@@ -449,6 +449,76 @@ async def test_pluggable_action(
    assert not plug_2


@pytest.mark.parametrize(
    ("config", "schema_dict", "expected_config"),
    [
        (
            {
                "platform": "test",
                "entity": "sensor.test",
                "from": "open",
                "to": "closed",
                "for": {"hours": 1},
                "attribute": "state",
                "value_template": "{{ value_json.val }}",
                "extra_field": "extra_value",
            },
            {},
            {
                "platform": "test",
                "entity": "sensor.test",
                "from": "open",
                "to": "closed",
                "for": {"hours": 1},
                "attribute": "state",
                "value_template": "{{ value_json.val }}",
                "extra_field": "extra_value",
                "options": {},
            },
        ),
        (
            {
                "platform": "test",
                "entity": "sensor.test",
                "from": "open",
                "to": "closed",
                "for": {"hours": 1},
                "attribute": "state",
                "value_template": "{{ value_json.val }}",
                "extra_field": "extra_value",
            },
            {
                vol.Required("entity"): str,
                vol.Optional("from"): str,
                vol.Optional("to"): str,
                vol.Optional("for"): dict,
                vol.Optional("attribute"): str,
                vol.Optional("value_template"): str,
            },
            {
                "platform": "test",
                "extra_field": "extra_value",
                "options": {
                    "entity": "sensor.test",
                    "from": "open",
                    "to": "closed",
                    "for": {"hours": 1},
                    "attribute": "state",
                    "value_template": "{{ value_json.val }}",
                },
            },
        ),
    ],
)
async def test_move_schema_fields_to_options(
    config, schema_dict, expected_config
) -> None:
    """Test moving schema fields to options."""
    assert (
        move_top_level_schema_fields_to_options(config, schema_dict) == expected_config
    )

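One detail worth noting in the second case above: schema_dict is keyed by vol.Required/vol.Optional markers, yet the expected "options" dict uses plain string keys. That works because voluptuous markers wrap the underlying key and stringify back to it, which is the assumption the sketch earlier in this file relies on; a quick standalone check:

import voluptuous as vol

# Markers wrap the plain key; str() recovers it, letting a helper match
# schema-dict keys against top-level config keys.
assert str(vol.Required("entity")) == "entity"
assert str(vol.Optional("value_template")) == "value_template"
assert {str(m) for m in (vol.Required("entity"), vol.Optional("from"))} == {"entity", "from"}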
async def test_platform_multiple_triggers(hass: HomeAssistant) -> None:
    """Test a trigger platform with multiple triggers."""