Compare commits

39 Commits

Author SHA1 Message Date
Martin Hjelmare
47dc51511c Link firmware install strings 2025-09-26 16:36:35 +02:00
Martin Hjelmare
127fcd4e16 Update firmware install strings 2025-09-26 12:32:53 +02:00
Martin Hjelmare
bfea5eec26 Update OTBR add-on strings 2025-09-26 11:29:58 +02:00
Erik Montnemery
89b327ed7b Remove device filter from target selector in bang_olufsen services (#152957) 2025-09-26 09:02:14 +02:00
Simone Chemelli
9bf361a1b8 Fix PIN failure if starting with 0 for Comelit SimpleHome (#152983) 2025-09-26 08:59:03 +02:00
J. Diego Rodríguez Royo
d11c171c75 Bump aiohomeconnect to version 0.20.0 (#153003) 2025-09-26 07:49:38 +02:00
puddly
c523c45d17 Allow ZHA discovery if discovery unique_id conflicts with config entry (#153009)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-26 07:39:00 +02:00
puddly
c1b9c0e1b6 Ignore discovery for existing ZHA entries (#152984) 2025-09-26 07:17:01 +02:00
puddly
487b9ff03e Bump ZHA to 0.0.73 (#153007) 2025-09-25 23:44:25 -04:00
Simone Chemelli
ec62b0cdfb Code optimization for Uptime Robot (#152993) 2025-09-26 00:34:09 +01:00
Brandon Harvey
6d0470064f Rename service to action in ESPHome (#152997) 2025-09-25 14:54:06 -05:00
Simone Chemelli
7450b3fd1a Improve tests for Alexa Devices (#152995) 2025-09-25 21:39:44 +02:00
Noah Husby
5b70910d77 Bump aiorussound to 4.8.2 (#152988) 2025-09-25 20:34:29 +02:00
Abílio Costa
52de5ff5ff Remove deprecated zone and event condition keys (#152986) 2025-09-25 19:23:40 +02:00
J. Nick Koston
c4389a1679 Bump aioesphomeapi to 41.10.0 (#152975)
Co-authored-by: Michael Hansen <mike@rhasspy.org>
2025-09-25 19:21:17 +02:00
Norbert Rittel
35faaa6cae Add missing square brackets to references in fully_kiosk actions (#152987) 2025-09-25 19:19:27 +02:00
Paul Bottein
3c0b13975a Update frontend to 20250925.1 (#152985) 2025-09-25 19:05:12 +02:00
Simone Chemelli
bc88696339 Remove deprecated sensors and update remaining for Alexa Devices (#151230) 2025-09-25 18:59:53 +02:00
Erik Montnemery
8f99c3f64a Remove device filter from target selector in lyric services (#152970) 2025-09-25 18:45:32 +02:00
Erik Montnemery
88016d96d4 Remove device and entity filter from target selector in homeassistant services (#152969) 2025-09-25 17:41:54 +01:00
Erik Montnemery
47df73b18f Remove device filter from target selector in google_mail services (#152968) 2025-09-25 18:32:12 +02:00
Maciej Bieniek
1c12d2b8cd Bump accuweather to version 4.2.2 (#152965) 2025-09-25 18:30:47 +02:00
Erik Montnemery
eb38837a8c Replace target selector with device selector in fully_kiosk services (#152959)
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-25 18:30:05 +02:00
Erik Montnemery
159c7fbfd1 Correct filter of target selector in sonos services (#152972) 2025-09-25 18:29:26 +02:00
Joost Lekkerkerker
7ee31f0884 Bump pySmartThings to 3.3.0 (#152977) 2025-09-25 17:57:30 +02:00
Daniel Potthast
0c5e12571a Update mvglive component (#146479)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-25 17:20:43 +02:00
Luke Lashley
9db973217f Fix incorrect Roborock test (#152980) 2025-09-25 17:18:24 +02:00
Artur Pragacz
cf1a745283 Move condition-specific fields into options (#152635) 2025-09-25 15:55:50 +02:00
peteS-UK
834e3f1963 Add HassKey for hass.data in Squeezebox (#149129) 2025-09-25 14:05:40 +02:00
Joakim Sørensen
3f8f7573c9 Bump hass-nabucasa from 1.1.1 to 1.1.2 (#152950) 2025-09-25 11:34:14 +01:00
Karsten Bade
0ae272f1f6 Add return types and docstring to sonos component (#152946) 2025-09-25 11:34:38 +02:00
Paul Bottein
8774295e2e Update frontend to 20250925.0 (#152945) 2025-09-25 11:33:01 +02:00
Erwin Douna
0c8d2594ef Portainer fix unique entity (#152941)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-25 09:49:22 +02:00
Simone Chemelli
205bd2676b Update IQS to platinum for Alexa Devices (#152905) 2025-09-25 09:45:50 +02:00
dependabot[bot]
25849fd9cc Bump actions/cache from 4.2.4 to 4.3.0 (#152934)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-25 09:43:03 +02:00
Sab44
7d6eac9ff7 Bump librehardwaremonitor-api to version 1.4.0 (#152938) 2025-09-25 09:42:31 +02:00
Luke Lashley
31017ebc98 Fix logical error when user has no Roborock maps (#152752) 2025-09-25 09:39:52 +02:00
Jimmy Zhening Luo
724a7b0ecc Quality: mark installation param doc as done (#152909) 2025-09-25 09:06:13 +02:00
Paulus Schoutsen
91e13d447a Prevent common control calling async methods from thread (#152931)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-09-24 23:09:54 -04:00
80 changed files with 1254 additions and 760 deletions

View File

@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -279,7 +279,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -309,7 +309,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -513,7 +513,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -525,7 +525,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -570,7 +570,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
- base
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -741,7 +741,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: .mypy_cache
key: >-
@@ -935,7 +935,7 @@ jobs:
name: Split tests for full run
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.1"]
"requirements": ["accuweather==4.2.2"]
}

View File

@@ -10,6 +10,7 @@ from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
@@ -20,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -31,6 +33,7 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
is_on_fn: Callable[[AmazonDevice, str], bool]
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True
BINARY_SENSORS: Final = (
@@ -41,46 +44,15 @@ BINARY_SENSORS: Final = (
is_on_fn=lambda device, _: device.online,
),
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, _: device.bluetooth_state,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="humanPresenceDetectionState",
key="detectionState",
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_on_fn=lambda device, key: bool(
device.sensors[key].value != SENSOR_STATE_OFF
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online and device.sensors[key].error is False
),
),
)
@@ -94,6 +66,22 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
coordinator,
BINARY_SENSOR_DOMAIN,
"humanPresenceDetectionState",
"detectionState",
)
async_add_entities(
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in BINARY_SENSORS
for serial_num in coordinator.data
if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
)
known_devices: set[str] = set()
def _check_device() -> None:
@@ -125,3 +113,13 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
return self.entity_description.is_on_fn(
self.device, self.entity_description.key
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError):
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
)
except CannotConnect:
errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError):
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"

View File

@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
except (CannotAuthenticate, TypeError) as err:
except CannotAuthenticate as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",

View File

@@ -60,7 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"do not disturb": device.do_not_disturb,
"response style": device.response_style,
"bluetooth state": device.bluetooth_state,
"sensors": device.sensors,
}

View File

@@ -1,44 +1,4 @@
{
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth-off",
"state": {
"on": "mdi:bluetooth"
}
},
"baby_cry_detection": {
"default": "mdi:account-voice-off",
"state": {
"on": "mdi:account-voice"
}
},
"beeping_appliance_detection": {
"default": "mdi:bell-off",
"state": {
"on": "mdi:bell-ring"
}
},
"cough_detection": {
"default": "mdi:blur-off",
"state": {
"on": "mdi:blur"
}
},
"dog_bark_detection": {
"default": "mdi:dog-side-off",
"state": {
"on": "mdi:dog-side"
}
},
"water_sounds_detection": {
"default": "mdi:water-pump-off",
"state": {
"on": "mdi:water-pump"
}
}
}
},
"services": {
"send_sound": {
"service": "mdi:cast-audio"

View File

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "silver",
"requirements": ["aioamazondevices==6.0.0"]
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.6"]
}

View File

@@ -31,6 +31,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
"""Amazon Devices sensor entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
SENSORS: Final = (
@@ -99,3 +102,13 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.device.sensors[self.entity_description.key].value
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -58,26 +58,6 @@
}
},
"entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
},
"baby_cry_detection": {
"name": "Baby crying"
},
"beeping_appliance_detection": {
"name": "Beeping appliance"
},
"cough_detection": {
"name": "Coughing"
},
"dog_bark_detection": {
"name": "Dog barking"
},
"water_sounds_detection": {
"name": "Water sounds"
}
},
"notify": {
"speak": {
"name": "Speak"

View File

@@ -8,13 +8,17 @@ from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
SwitchEntity,
SwitchEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call
from .utils import alexa_api_call, async_update_unique_id
PARALLEL_UPDATES = 1
@@ -24,16 +28,17 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
"""Alexa Devices switch entity description."""
is_on_fn: Callable[[AmazonDevice], bool]
subkey: str
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
method: str
SWITCHES: Final = (
AmazonSwitchEntityDescription(
key="do_not_disturb",
subkey="AUDIO_PLAYER",
key="dnd",
translation_key="do_not_disturb",
is_on_fn=lambda _device: _device.do_not_disturb,
is_on_fn=lambda device: bool(device.sensors["dnd"].value),
method="set_do_not_disturb",
),
)
@@ -48,6 +53,11 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "DND" switch and remove from Speaker Group
await async_update_unique_id(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)
known_devices: set[str] = set()
def _check_device() -> None:
@@ -59,7 +69,7 @@ async def async_setup_entry(
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES
for serial_num in new_devices
if switch_desc.subkey in coordinator.data[serial_num].capabilities
if switch_desc.key in coordinator.data[serial_num].sensors
)
_check_device()
@@ -94,3 +104,13 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
def is_on(self) -> bool:
"""Return True if switch is on."""
return self.entity_description.is_on_fn(self.device)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -6,9 +6,12 @@ from typing import Any, Concatenate
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
from .const import DOMAIN
from .const import _LOGGER, DOMAIN
from .coordinator import AmazonDevicesCoordinator
from .entity import AmazonEntity
@@ -38,3 +41,23 @@ def alexa_api_call[_T: AmazonEntity, **_P](
) from err
return cmd_wrapper
async def async_update_unique_id(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
domain: str,
old_key: str,
new_key: str,
) -> None:
"""Update unique id for entities created with old format."""
entity_registry = er.async_get(hass)
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{old_key}"
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
_LOGGER.debug("Updating unique_id for %s", entity_id)
new_unique_id = unique_id.replace(old_key, new_key)
# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
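
The helper above migrates existing entity registry entries from the old unique ID format to the new one. A small illustration of the string rewrite it performs, shown here for the do_not_disturb -> dnd switch rename; the serial number is a made-up placeholder:

serial_num = "G090XXXXXXXX"  # hypothetical device serial, for illustration only
old_key, new_key = "do_not_disturb", "dnd"

unique_id = f"{serial_num}-{old_key}"
new_unique_id = unique_id.replace(old_key, new_key)

assert unique_id == "G090XXXXXXXX-do_not_disturb"
assert new_unique_id == "G090XXXXXXXX-dnd"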

View File

@@ -3,16 +3,12 @@ beolink_allstandby:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
@@ -37,8 +33,6 @@ beolink_join:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
@@ -71,16 +65,12 @@ beolink_leave:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.1"],
"requirements": ["hass-nabucasa==1.1.2"],
"single_config_entry": true
}

View File

@@ -25,23 +25,27 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = 111111
DEFAULT_PIN = "111111"
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
}
)
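
The schema change above exists because the Comelit PIN was previously coerced with cv.positive_int, which silently drops leading zeros (the bug referenced in commit 9bf361a1b8). A minimal standalone sketch of the string-plus-regex approach, using only the standard-library re module rather than homeassistant.helpers.config_validation:

import re

PIN_REGEX = r"^[0-9]{4,10}$"


def validate_pin(pin: str) -> str:
    """Accept the PIN only when it matches the 4-10 digit format."""
    if not re.fullmatch(PIN_REGEX, pin):
        raise ValueError(f"invalid PIN format: {pin!r}")
    return pin


print(validate_pin("012345"))  # "012345" keeps its leading zero
print(int("012345"))           # 12345 - why the old integer schema broke such PINs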

View File

@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
_hass: HomeAssistant
_config: ConfigType
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate device condition config."""
return await async_validate_device_automation_config(
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
)
"""Validate config.
This is here just to satisfy the abstract class interface. It is never called.
"""
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition."""

View File

@@ -1073,7 +1073,7 @@ def _async_register_service(
service_name,
{
"description": (
f"Calls the service {service.name} of the node {device_info.name}"
f"Performs the action {service.name} of the node {device_info.name}"
),
"fields": fields,
},

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.9.4",
"aioesphomeapi==41.10.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.3.0"
],

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250924.0"]
"requirements": ["home-assistant-frontend==20250925.1"]
}

View File

@@ -1,8 +1,10 @@
load_url:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
url:
example: "https://home-assistant.io"
required: true
@@ -10,10 +12,12 @@ load_url:
text:
set_config:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
key:
example: "motionSensitivity"
required: true
@@ -26,12 +30,14 @@ set_config:
text:
start_application:
target:
device:
integration: fully_kiosk
fields:
application:
example: "de.ozerov.fully"
required: true
selector:
text:
device_id:
required: true
selector:
device:
integration: fully_kiosk

View File

@@ -147,6 +147,10 @@
"name": "Load URL",
"description": "Loads a URL on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "Device ID",
"description": "The target device for this action."
},
"url": {
"name": "[%key:common::config_flow::data::url%]",
"description": "URL to load."
@@ -157,6 +161,10 @@
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
},
"key": {
"name": "Key",
"description": "Configuration parameter to set."
@@ -174,6 +182,10 @@
"application": {
"name": "Application",
"description": "Package name of the application to start."
},
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
}
}
}

View File

@@ -1,7 +1,5 @@
set_vacation:
target:
device:
integration: google_mail
entity:
integration: google_mail
fields:

View File

@@ -22,6 +22,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.19.0"],
"requirements": ["aiohomeconnect==0.20.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -32,15 +32,12 @@ set_location:
stop:
toggle:
target:
entity: {}
turn_on:
target:
entity: {}
turn_off:
target:
entity: {}
update_entity:
fields:
@@ -53,8 +50,6 @@ update_entity:
reload_custom_templates:
reload_config_entry:
target:
entity: {}
device: {}
fields:
entry_id:
advanced: true

View File

@@ -27,6 +27,12 @@
"install_addon": {
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
},
"install_thread_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
},
"install_zigbee_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
},
"notify_channel_change": {
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -69,12 +75,10 @@
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
},
"install_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
},
"start_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
},
"otbr_failed": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -129,14 +133,21 @@
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
}
},
"config": {
"flow_title": "{model}",
"step": {
"install_thread_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
},
"install_zigbee_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
},
"pick_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
@@ -158,12 +169,10 @@
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
},
"install_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
},
"start_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
},
"otbr_failed": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -215,9 +224,10 @@
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
}
},
"exceptions": {

View File

@@ -23,12 +23,16 @@
"description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration."
},
"install_otbr_addon": {
"title": "Installing OpenThread Border Router add-on",
"description": "The OpenThread Border Router (OTBR) add-on is being installed."
"title": "Configuring Thread"
},
"install_thread_firmware": {
"title": "Updating adapter"
},
"install_zigbee_firmware": {
"title": "Updating adapter"
},
"start_otbr_addon": {
"title": "Starting OpenThread Border Router add-on",
"description": "The OpenThread Border Router (OTBR) add-on is now starting."
"title": "Configuring Thread"
},
"otbr_failed": {
"title": "Failed to set up OpenThread Border Router",
@@ -72,7 +76,9 @@
"fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
},
"progress": {
"install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."
"install_firmware": "Installing {firmware_name} firmware. Do not make any changes to your hardware or software until this finishes.",
"install_otbr_addon": "Installing add-on",
"start_otbr_addon": "Starting add-on"
}
}
},

View File

@@ -27,6 +27,12 @@
"install_addon": {
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
},
"install_thread_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
},
"install_zigbee_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
},
"notify_channel_change": {
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -69,12 +75,10 @@
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
},
"install_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
},
"start_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
},
"otbr_failed": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -129,9 +133,10 @@
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
}
},
"config": {
@@ -158,12 +163,16 @@
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
},
"install_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
},
"install_thread_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
},
"install_zigbee_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
},
"start_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
},
"otbr_failed": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -215,9 +224,10 @@
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
}
},
"exceptions": {

View File

@@ -35,6 +35,12 @@
"install_addon": {
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
},
"install_thread_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
},
"install_zigbee_firmware": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
},
"notify_channel_change": {
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -92,12 +98,10 @@
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
},
"install_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
},
"start_otbr_addon": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
},
"otbr_failed": {
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -154,9 +158,10 @@
},
"progress": {
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
}
},
"entity": {

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/libre_hardware_monitor",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.3.1"]
"requirements": ["librehardwaremonitor-api==1.4.0"]
}

View File

@@ -28,7 +28,7 @@ rules:
docs-configuration-parameters:
status: done
comment: No options to configure
docs-installation-parameters: todo
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo

View File

@@ -1,7 +1,5 @@
set_hold_time:
target:
device:
integration: lyric
entity:
integration: lyric
domain: climate

View File

@@ -2,10 +2,8 @@
"domain": "mvglive",
"name": "MVG",
"codeowners": [],
"disabled": "This integration is disabled because it uses non-open source code to operate.",
"documentation": "https://www.home-assistant.io/integrations/mvglive",
"iot_class": "cloud_polling",
"loggers": ["MVGLive"],
"quality_scale": "legacy",
"requirements": ["PyMVGLive==1.1.4"]
"loggers": ["MVG"],
"requirements": ["mvg==1.4.0"]
}

View File

@@ -1,13 +1,14 @@
"""Support for departure information for public transport in Munich."""
# mypy: ignore-errors
from __future__ import annotations
from collections.abc import Mapping
from copy import deepcopy
from datetime import timedelta
import logging
from typing import Any
import MVGLive
from mvg import MvgApi, MvgApiError, TransportType
import voluptuous as vol
from homeassistant.components.sensor import (
@@ -19,6 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
@@ -44,53 +46,55 @@ ICONS = {
"SEV": "mdi:checkbox-blank-circle-outline",
"-": "mdi:clock",
}
ATTRIBUTION = "Data provided by MVG-live.de"
ATTRIBUTION = "Data provided by mvg.de"
SCAN_INTERVAL = timedelta(seconds=30)
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NEXT_DEPARTURE): [
{
vol.Required(CONF_STATION): cv.string,
vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
vol.Optional(
CONF_PRODUCTS, default=DEFAULT_PRODUCT
): cv.ensure_list_csv,
vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
}
]
}
PLATFORM_SCHEMA = vol.All(
cv.deprecated(CONF_DIRECTIONS),
SENSOR_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NEXT_DEPARTURE): [
{
vol.Required(CONF_STATION): cv.string,
vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
vol.Optional(
CONF_PRODUCTS, default=DEFAULT_PRODUCT
): cv.ensure_list_csv,
vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
}
]
}
),
)
def setup_platform(
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the MVGLive sensor."""
add_entities(
(
MVGLiveSensor(
nextdeparture.get(CONF_STATION),
nextdeparture.get(CONF_DESTINATIONS),
nextdeparture.get(CONF_DIRECTIONS),
nextdeparture.get(CONF_LINES),
nextdeparture.get(CONF_PRODUCTS),
nextdeparture.get(CONF_TIMEOFFSET),
nextdeparture.get(CONF_NUMBER),
nextdeparture.get(CONF_NAME),
)
for nextdeparture in config[CONF_NEXT_DEPARTURE]
),
True,
)
sensors = [
MVGLiveSensor(
hass,
nextdeparture.get(CONF_STATION),
nextdeparture.get(CONF_DESTINATIONS),
nextdeparture.get(CONF_LINES),
nextdeparture.get(CONF_PRODUCTS),
nextdeparture.get(CONF_TIMEOFFSET),
nextdeparture.get(CONF_NUMBER),
nextdeparture.get(CONF_NAME),
)
for nextdeparture in config[CONF_NEXT_DEPARTURE]
]
add_entities(sensors, True)
class MVGLiveSensor(SensorEntity):
@@ -100,38 +104,38 @@ class MVGLiveSensor(SensorEntity):
def __init__(
self,
station,
hass: HomeAssistant,
station_name,
destinations,
directions,
lines,
products,
timeoffset,
number,
name,
):
) -> None:
"""Initialize the sensor."""
self._station = station
self._name = name
self._station_name = station_name
self.data = MVGLiveData(
station, destinations, directions, lines, products, timeoffset, number
hass, station_name, destinations, lines, products, timeoffset, number
)
self._state = None
self._icon = ICONS["-"]
@property
def name(self):
def name(self) -> str | None:
"""Return the name of the sensor."""
if self._name:
return self._name
return self._station
return self._station_name
@property
def native_value(self):
def native_value(self) -> str | None:
"""Return the next departure time."""
return self._state
@property
def extra_state_attributes(self):
def extra_state_attributes(self) -> Mapping[str, Any] | None:
"""Return the state attributes."""
if not (dep := self.data.departures):
return None
@@ -140,88 +144,114 @@ class MVGLiveSensor(SensorEntity):
return attr
@property
def icon(self):
def icon(self) -> str | None:
"""Icon to use in the frontend, if any."""
return self._icon
@property
def native_unit_of_measurement(self):
def native_unit_of_measurement(self) -> str | None:
"""Return the unit this state is expressed in."""
return UnitOfTime.MINUTES
def update(self) -> None:
async def async_update(self) -> None:
"""Get the latest data and update the state."""
self.data.update()
await self.data.update()
if not self.data.departures:
self._state = "-"
self._state = None
self._icon = ICONS["-"]
else:
self._state = self.data.departures[0].get("time", "-")
self._icon = ICONS[self.data.departures[0].get("product", "-")]
self._state = self.data.departures[0].get("time_in_mins", "-")
self._icon = self.data.departures[0].get("icon", ICONS["-"])
def _get_minutes_until_departure(departure_time: int) -> int:
"""Calculate the time difference in minutes between the current time and a given departure time.
Args:
departure_time: Unix timestamp of the departure time, in seconds.
Returns:
The time difference in minutes, as an integer.
"""
current_time = dt_util.utcnow()
departure_datetime = dt_util.utc_from_timestamp(departure_time)
time_difference = (departure_datetime - current_time).total_seconds()
return int(time_difference / 60.0)
class MVGLiveData:
"""Pull data from the mvg-live.de web page."""
"""Pull data from the mvg.de web page."""
def __init__(
self, station, destinations, directions, lines, products, timeoffset, number
):
self,
hass: HomeAssistant,
station_name,
destinations,
lines,
products,
timeoffset,
number,
) -> None:
"""Initialize the sensor."""
self._station = station
self._hass = hass
self._station_name = station_name
self._station_id = None
self._destinations = destinations
self._directions = directions
self._lines = lines
self._products = products
self._timeoffset = timeoffset
self._number = number
self._include_ubahn = "U-Bahn" in self._products
self._include_tram = "Tram" in self._products
self._include_bus = "Bus" in self._products
self._include_sbahn = "S-Bahn" in self._products
self.mvg = MVGLive.MVGLive()
self.departures = []
self.departures: list[dict[str, Any]] = []
def update(self):
async def update(self):
"""Update the connection data."""
if self._station_id is None:
try:
station = await MvgApi.station_async(self._station_name)
self._station_id = station["id"]
except MvgApiError as err:
_LOGGER.error(
"Failed to resolve station %s: %s", self._station_name, err
)
self.departures = []
return
try:
_departures = self.mvg.getlivedata(
station=self._station,
timeoffset=self._timeoffset,
ubahn=self._include_ubahn,
tram=self._include_tram,
bus=self._include_bus,
sbahn=self._include_sbahn,
_departures = await MvgApi.departures_async(
station_id=self._station_id,
offset=self._timeoffset,
limit=self._number,
transport_types=[
transport_type
for transport_type in TransportType
if transport_type.value[0] in self._products
]
if self._products
else None,
)
except ValueError:
self.departures = []
_LOGGER.warning("Returned data not understood")
return
self.departures = []
for i, _departure in enumerate(_departures):
# find the first departure meeting the criteria
for _departure in _departures:
if (
"" not in self._destinations[:1]
and _departure["destination"] not in self._destinations
):
continue
if (
"" not in self._directions[:1]
and _departure["direction"] not in self._directions
):
if "" not in self._lines[:1] and _departure["line"] not in self._lines:
continue
if "" not in self._lines[:1] and _departure["linename"] not in self._lines:
time_to_departure = _get_minutes_until_departure(_departure["time"])
if time_to_departure < self._timeoffset:
continue
if _departure["time"] < self._timeoffset:
continue
# now select the relevant data
_nextdep = {}
for k in ("destination", "linename", "time", "direction", "product"):
for k in ("destination", "line", "type", "cancelled", "icon"):
_nextdep[k] = _departure.get(k, "")
_nextdep["time"] = int(_nextdep["time"])
_nextdep["time_in_mins"] = time_to_departure
self.departures.append(_nextdep)
if i == self._number - 1:
break
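
The rewrite above converts the mvg library's unix departure timestamps into a minutes-from-now value via _get_minutes_until_departure. A standalone equivalent using only the standard library instead of homeassistant.util.dt, with a fixed reference time so the check is reproducible:

from datetime import datetime, timezone


def minutes_until(departure_ts: int, now: datetime) -> int:
    """Whole minutes between 'now' and a unix departure timestamp."""
    departure = datetime.fromtimestamp(departure_ts, tz=timezone.utc)
    return int((departure - now).total_seconds() / 60.0)


reference = datetime(2025, 9, 25, 12, 0, tzinfo=timezone.utc)
assert minutes_until(int(reference.timestamp()) + 300, now=reference) == 5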

View File

@@ -131,7 +131,15 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
# Container ID's are ephemeral, so use the container name for the unique ID
# The first one, should always be unique, it's fine if users have aliases
# According to Docker's API docs, the first name is unique
device_identifier = (
self._device_info.names[0].replace("/", " ").strip()
if self._device_info.names
else None
)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"
@property
def available(self) -> bool:
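
Docker's API reports container names with a leading slash (for example "/portainer"), which is why the new unique ID derivation strips it from the first name instead of using the ephemeral container ID. A minimal sketch of that derivation; the container names used here are illustrative:

def container_identifier(names: list[str] | None) -> str | None:
    """First Docker container name, with the leading slash and whitespace removed."""
    if not names:
        return None
    return names[0].replace("/", " ").strip()


print(container_identifier(["/portainer"]))            # portainer
print(container_identifier(["/stack_web_1", "/web"]))  # stack_web_1
print(container_identifier(None))                       # None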

View File

@@ -60,7 +60,7 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
self._attr_device_info = DeviceInfo(
identifiers={
(DOMAIN, f"{self.coordinator.config_entry.entry_id}_{self.device_id}")
(DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
},
manufacturer=DEFAULT_NAME,
model="Container",

View File

@@ -351,13 +351,9 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
def _set_current_map(self) -> None:
if (
self.roborock_device_info.props.status is not None
and self.roborock_device_info.props.status.map_status is not None
and self.roborock_device_info.props.status.current_map is not None
):
# The map status represents the map flag as flag * 4 + 3 -
# so we have to invert that in order to get the map flag that we can use to set the current map.
self.current_map = (
self.roborock_device_info.props.status.map_status - 3
) // 4
self.current_map = self.roborock_device_info.props.status.current_map
async def set_current_map_rooms(self) -> None:
"""Fetch all of the rooms for the current map and set on RoborockMapInfo."""
@@ -440,7 +436,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
# If either of these fail, we don't care, and we want to continue.
await asyncio.gather(*tasks, return_exceptions=True)
if len(self.maps) != 1:
if len(self.maps) > 1:
# Set the map back to the map the user previously had selected so that it
# does not change the end user's app.
# Only needs to happen when we changed maps above.
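For reference on the deleted branch: the map flag used to be derived arithmetically from map_status, which the new code replaces by reading status.current_map directly. A short worked example of the old inversion:

# The device reports map_status as flag * 4 + 3, so the removed branch
# recovered the flag by inverting that expression.
def map_flag_from_status(map_status: int) -> int:
    return (map_status - 3) // 4


assert map_flag_from_status(3) == 0   # flag 0 -> status 0 * 4 + 3
assert map_flag_from_status(7) == 1   # flag 1 -> status 1 * 4 + 3
assert map_flag_from_status(11) == 2  # flag 2 -> status 2 * 4 + 3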

View File

@@ -7,6 +7,6 @@
"iot_class": "local_push",
"loggers": ["aiorussound"],
"quality_scale": "silver",
"requirements": ["aiorussound==4.8.1"],
"requirements": ["aiorussound==4.8.2"],
"zeroconf": ["_rio._tcp.local."]
}

View File

@@ -30,5 +30,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.2.9"]
"requirements": ["pysmartthings==3.3.0"]
}

View File

@@ -610,7 +610,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
def _play_media_queue(
self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue
):
) -> None:
"""Manage adding, replacing, playing items onto the sonos queue."""
_LOGGER.debug(
"_play_media_queue item_id [%s] title [%s] enqueue [%s]",
@@ -639,7 +639,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
media_type: MediaType | str,
media_id: str,
enqueue: MediaPlayerEnqueue,
):
) -> None:
"""Play a directory from a music library share."""
item = media_browser.get_media(self.media.library, media_id, media_type)
if not item:
@@ -660,6 +660,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
enqueue: MediaPlayerEnqueue,
title: str,
) -> None:
"""Play a sharelink."""
share_link = self.coordinator.share_link
kwargs = {}
if title:

View File

@@ -24,8 +24,9 @@ restore:
set_sleep_timer:
target:
device:
entity:
integration: sonos
domain: media_player
fields:
sleep_time:
selector:
@@ -36,13 +37,15 @@ set_sleep_timer:
clear_sleep_timer:
target:
device:
entity:
integration: sonos
domain: media_player
play_queue:
target:
device:
entity:
integration: sonos
domain: media_player
fields:
queue_position:
selector:
@@ -53,8 +56,9 @@ play_queue:
remove_from_queue:
target:
device:
entity:
integration: sonos
domain: media_player
fields:
queue_position:
selector:
@@ -71,8 +75,9 @@ get_queue:
update_alarm:
target:
device:
entity:
integration: sonos
domain: media_player
fields:
alarm_id:
required: true

View File

@@ -1,5 +1,6 @@
"""The Squeezebox integration."""
import asyncio
from asyncio import timeout
from dataclasses import dataclass, field
from datetime import datetime
@@ -31,11 +32,11 @@ from homeassistant.helpers.device_registry import (
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later
from homeassistant.util.hass_dict import HassKey
from .const import (
CONF_HTTPS,
DISCOVERY_INTERVAL,
DISCOVERY_TASK,
DOMAIN,
SERVER_MANUFACTURER,
SERVER_MODEL,
@@ -64,6 +65,8 @@ PLATFORMS = [
Platform.UPDATE,
]
SQUEEZEBOX_HASS_DATA: HassKey[asyncio.Task] = HassKey(DOMAIN)
@dataclass
class SqueezeboxData:
@@ -240,7 +243,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry)
current_entries = hass.config_entries.async_entries(DOMAIN)
if len(current_entries) == 1 and current_entries[0] == entry:
_LOGGER.debug("Stopping server discovery task")
hass.data[DOMAIN][DISCOVERY_TASK].cancel()
hass.data[DOMAIN].pop(DISCOVERY_TASK)
hass.data[SQUEEZEBOX_HASS_DATA].cancel()
hass.data.pop(SQUEEZEBOX_HASS_DATA)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,7 +1,6 @@
"""Constants for the Squeezebox component."""
CONF_HTTPS = "https"
DISCOVERY_TASK = "discovery_task"
DOMAIN = "squeezebox"
DEFAULT_PORT = 9000
PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub"

View File

@@ -44,6 +44,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.start import async_at_start
from homeassistant.util.dt import utcnow
from . import SQUEEZEBOX_HASS_DATA
from .browse_media import (
BrowseData,
build_item_response,
@@ -58,7 +59,6 @@ from .const import (
CONF_VOLUME_STEP,
DEFAULT_BROWSE_LIMIT,
DEFAULT_VOLUME_STEP,
DISCOVERY_TASK,
DOMAIN,
SERVER_MANUFACTURER,
SERVER_MODEL,
@@ -110,12 +110,10 @@ async def start_server_discovery(hass: HomeAssistant) -> None:
},
)
hass.data.setdefault(DOMAIN, {})
if DISCOVERY_TASK not in hass.data[DOMAIN]:
if not hass.data.get(SQUEEZEBOX_HASS_DATA):
_LOGGER.debug("Adding server discovery task for squeezebox")
hass.data[DOMAIN][DISCOVERY_TASK] = hass.async_create_background_task(
async_discover(_discovered_server),
name="squeezebox server discovery",
hass.data[SQUEEZEBOX_HASS_DATA] = hass.async_create_background_task(
async_discover(_discovered_server), name="squeezebox server discovery"
)
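The Squeezebox hunks above replace the string-keyed hass.data[DOMAIN][DISCOVERY_TASK] lookup with a typed HassKey that stores the discovery task directly under hass.data. A simplified, illustrative sketch of the pattern (not the homeassistant.util.hass_dict implementation):

import asyncio
from typing import Generic, TypeVar

_T = TypeVar("_T")


class DemoHassKey(str, Generic[_T]):
    """Illustrative stand-in: a str subclass that carries the stored value type."""

    __slots__ = ()


# The real code declares SQUEEZEBOX_HASS_DATA: HassKey[asyncio.Task] = HassKey(DOMAIN);
# hass.data then resolves lookups with that key to asyncio.Task for the type checker,
# so the separate DISCOVERY_TASK constant and the nested dict become unnecessary.
SQUEEZEBOX_KEY: DemoHassKey[asyncio.Task] = DemoHassKey("squeezebox")
print(isinstance(SQUEEZEBOX_KEY, str))  # True: still usable as a normal dict key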

View File

@@ -3,16 +3,18 @@
from __future__ import annotations
from datetime import datetime, timedelta
from typing import cast
from typing import Any, cast
import voluptuous as vol
from homeassistant.const import CONF_CONDITION, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.const import CONF_OPTIONS, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
condition_trace_set_result,
condition_trace_update_result,
trace_condition_function,
@@ -21,20 +23,22 @@ from homeassistant.helpers.sun import get_astral_event_date
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from homeassistant.util import dt as dt_util
_CONDITION_SCHEMA = vol.All(
vol.Schema(
{
**cv.CONDITION_BASE_SCHEMA,
vol.Required(CONF_CONDITION): "sun",
vol.Optional("before"): cv.sun_event,
vol.Optional("before_offset"): cv.time_period,
vol.Optional("after"): vol.All(
vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
),
vol.Optional("after_offset"): cv.time_period,
}
_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
vol.Optional("before"): cv.sun_event,
vol.Optional("before_offset"): cv.time_period,
vol.Optional("after"): vol.All(
vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
),
cv.has_at_least_one_key("before", "after"),
vol.Optional("after_offset"): cv.time_period,
}
_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_OPTIONS): vol.All(
_OPTIONS_SCHEMA_DICT,
cv.has_at_least_one_key("before", "after"),
)
}
)
@@ -125,24 +129,36 @@ def sun(
class SunCondition(Condition):
"""Sun condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
_options: dict[str, Any]
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = move_top_level_schema_fields_to_options(
complete_config, _OPTIONS_SCHEMA_DICT
)
return await super().async_validate_complete_config(hass, complete_config)
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return _CONDITION_SCHEMA(config) # type: ignore[no-any-return]
return cast(ConfigType, _CONDITION_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
assert config.options is not None
self._options = config.options
async def async_get_checker(self) -> ConditionCheckerType:
"""Wrap action method with sun based condition."""
before = self._config.get("before")
after = self._config.get("after")
before_offset = self._config.get("before_offset")
after_offset = self._config.get("after_offset")
before = self._options.get("before")
after = self._options.get("after")
before_offset = self._options.get("before_offset")
after_offset = self._options.get("after_offset")
@trace_condition_function
def sun_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:

View File

@@ -28,11 +28,12 @@ async def async_setup_entry(
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = {monitor.id for monitor in coordinator.data}
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
entities: list[UptimeRobotBinarySensor] = []
for monitor in coordinator.data:
if monitor.id in known_devices:
continue
known_devices.add(monitor.id)
entities.append(
UptimeRobotBinarySensor(
coordinator,
BinarySensorEntityDescription(
@@ -41,9 +42,9 @@ async def async_setup_entry(
),
monitor=monitor,
)
for monitor in coordinator.data
if monitor.id in new_devices
)
if entities:
async_add_entities(entities)
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))

View File

@@ -91,9 +91,6 @@ async def async_predict_common_control(
Args:
hass: Home Assistant instance
user_id: User ID to filter events by.
Returns:
Dictionary with time categories as keys and lists of most common entity IDs as values
"""
# Get the recorder instance to ensure it's ready
recorder = get_instance(hass)
@@ -108,6 +105,9 @@ async def async_predict_common_control(
time_cat: Counter() for time_cat in TIME_CATEGORIES
}
allowed_entities = set(hass.states.async_entity_ids(ALLOWED_DOMAINS))
hidden_entities: set[str] = set()
# Keep track of contexts that we processed so that we will only process
# the first service call in a context, and not subsequent calls.
context_processed: set[bytes] = set()
@@ -125,7 +125,7 @@ async def async_predict_common_control(
context_processed.add(context_id)
# Parse the event data
if not shared_data:
if not time_fired_ts or not shared_data:
continue
try:
@@ -159,27 +159,26 @@ async def async_predict_common_control(
if not isinstance(entity_ids, list):
entity_ids = [entity_ids]
# Filter out entity IDs that are not in allowed domains
entity_ids = [
entity_id
for entity_id in entity_ids
if entity_id.split(".")[0] in ALLOWED_DOMAINS
and ((entry := ent_reg.async_get(entity_id)) is None or not entry.hidden)
]
# Convert to local time for time category determination
period = time_category(
datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
)
period_results = results[period]
if not entity_ids:
continue
# Count entity usage
for entity_id in entity_ids:
if entity_id not in allowed_entities or entity_id in hidden_entities:
continue
# Convert timestamp to datetime and determine time category
if time_fired_ts:
# Convert to local time for time category determination
period = time_category(
datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
)
if (
entity_id not in period_results
and (entry := ent_reg.async_get(entity_id))
and entry.hidden
):
hidden_entities.add(entity_id)
continue
# Count entity usage
for entity_id in entity_ids:
results[period][entity_id] += 1
period_results[entity_id] += 1
return EntityUsagePredictions(
morning=[

View File

@@ -23,6 +23,7 @@ from homeassistant.components.homeassistant_hardware import silabs_multiprotocol
from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware
from homeassistant.config_entries import (
SOURCE_IGNORE,
SOURCE_ZEROCONF,
ConfigEntry,
ConfigEntryBaseFlow,
ConfigEntryState,
@@ -183,27 +184,17 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
self._hass = hass
self._radio_mgr.hass = hass
async def _get_config_entry_data(self) -> dict:
def _get_config_entry_data(self) -> dict[str, Any]:
"""Extract ZHA config entry data from the radio manager."""
assert self._radio_mgr.radio_type is not None
assert self._radio_mgr.device_path is not None
assert self._radio_mgr.device_settings is not None
try:
device_path = await self.hass.async_add_executor_job(
usb.get_serial_by_id, self._radio_mgr.device_path
)
except OSError as error:
raise AbortFlow(
reason="cannot_resolve_path",
description_placeholders={"path": self._radio_mgr.device_path},
) from error
return {
CONF_DEVICE: DEVICE_SCHEMA(
{
**self._radio_mgr.device_settings,
CONF_DEVICE_PATH: device_path,
CONF_DEVICE_PATH: self._radio_mgr.device_path,
}
),
CONF_RADIO_TYPE: self._radio_mgr.radio_type.name,
@@ -662,13 +653,8 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
"""Set the flow's unique ID and update the device path in an ignored flow."""
current_entry = await self.async_set_unique_id(unique_id)
if not current_entry:
return
if current_entry.source != SOURCE_IGNORE:
self._abort_if_unique_id_configured()
else:
# Only update the current entry if it is an ignored discovery
# Only update the current entry if it is an ignored discovery
if current_entry and current_entry.source == SOURCE_IGNORE:
self._abort_if_unique_id_configured(
updates={
CONF_DEVICE: {
@@ -703,6 +689,36 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
DOMAIN, include_ignore=False
)
if self._radio_mgr.device_path is not None:
# Ensure the radio manager device path is unique and will match ZHA's
try:
self._radio_mgr.device_path = await self.hass.async_add_executor_job(
usb.get_serial_by_id, self._radio_mgr.device_path
)
except OSError as error:
raise AbortFlow(
reason="cannot_resolve_path",
description_placeholders={"path": self._radio_mgr.device_path},
) from error
# mDNS discovery can advertise the same adapter on multiple IPs or via a
# hostname, which should be considered a duplicate
current_device_paths = {self._radio_mgr.device_path}
if self.source == SOURCE_ZEROCONF:
discovery_info = self.init_data
current_device_paths |= {
f"socket://{ip}:{discovery_info.port}"
for ip in discovery_info.ip_addresses
}
for entry in zha_config_entries:
path = entry.data.get(CONF_DEVICE, {}).get(CONF_DEVICE_PATH)
# Abort discovery if the device path is already configured
if path is not None and path in current_device_paths:
return self.async_abort(reason="single_instance_allowed")
# Without confirmation, discovery can automatically progress into parts of the
# config flow logic that interacts with hardware.
if user_input is not None or (
@@ -873,7 +889,7 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
zha_config_entries = self.hass.config_entries.async_entries(
DOMAIN, include_ignore=False
)
data = await self._get_config_entry_data()
data = self._get_config_entry_data()
if len(zha_config_entries) == 1:
return self.async_update_reload_and_abort(
@@ -976,7 +992,7 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow):
# Avoid creating both `.options` and `.data` by directly writing `data` here
self.hass.config_entries.async_update_entry(
entry=self.config_entry,
data=await self._get_config_entry_data(),
data=self._get_config_entry_data(),
options=self.config_entry.options,
)
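The zeroconf handling added above expands a discovery into every socket:// path the same adapter could already be stored under, so an adapter advertised on several IPs or via a hostname is treated as a duplicate of the existing entry. A condensed sketch of that check with hypothetical values:

# The IPs, port, and configured path below are hypothetical.
discovered_ips = ["192.168.1.50", "fe80::1"]
discovered_port = 6638
resolved_path = "socket://core-silabs-multiprotocol:6638"

current_device_paths = {resolved_path} | {
    f"socket://{ip}:{discovered_port}" for ip in discovered_ips
}

configured_paths = {"socket://192.168.1.50:6638"}  # from an existing config entry
should_abort = any(path in current_device_paths for path in configured_paths)
print(should_abort)  # True -> async_abort(reason="single_instance_allowed")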

View File

@@ -21,7 +21,7 @@
"zha",
"universal_silabs_flasher"
],
"requirements": ["zha==0.0.72"],
"requirements": ["zha==0.0.73"],
"usb": [
{
"vid": "10C4",

View File

@@ -2,14 +2,16 @@
from __future__ import annotations
from typing import Any, cast
import voluptuous as vol
from homeassistant.const import (
ATTR_GPS_ACCURACY,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_CONDITION,
CONF_ENTITY_ID,
CONF_OPTIONS,
CONF_ZONE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
@@ -17,26 +19,22 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, State
from homeassistant.exceptions import ConditionErrorContainer, ConditionErrorMessage
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import in_zone
_CONDITION_SCHEMA = vol.Schema(
{
**cv.CONDITION_BASE_SCHEMA,
vol.Required(CONF_CONDITION): "zone",
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
vol.Required("zone"): cv.entity_ids,
# To support use_trigger_value in automation
# Deprecated 2016/04/25
vol.Optional("event"): vol.Any("enter", "leave"),
}
)
_OPTIONS_SCHEMA_DICT: dict[vol.Marker, Any] = {
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
vol.Required("zone"): cv.entity_ids,
}
_CONDITION_SCHEMA = vol.Schema({CONF_OPTIONS: _OPTIONS_SCHEMA_DICT})
def zone(
@@ -95,21 +93,34 @@ def zone(
class ZoneCondition(Condition):
"""Zone condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
_options: dict[str, Any]
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = move_top_level_schema_fields_to_options(
complete_config, _OPTIONS_SCHEMA_DICT
)
return await super().async_validate_complete_config(hass, complete_config)
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return _CONDITION_SCHEMA(config) # type: ignore[no-any-return]
return cast(ConfigType, _CONDITION_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
assert config.options is not None
self._options = config.options
async def async_get_checker(self) -> ConditionCheckerType:
"""Wrap action method with zone based condition."""
entity_ids = self._config.get(CONF_ENTITY_ID, [])
zone_entity_ids = self._config.get(CONF_ZONE, [])
entity_ids = self._options.get(CONF_ENTITY_ID, [])
zone_entity_ids = self._options.get(CONF_ZONE, [])
@trace_condition_function
def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:

View File

@@ -21,6 +21,7 @@ from homeassistant.const import (
)
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.trigger import (
Trigger,
@@ -28,7 +29,6 @@ from homeassistant.helpers.trigger import (
TriggerConfig,
TriggerData,
TriggerInfo,
move_top_level_schema_fields_to_options,
)
from homeassistant.helpers.typing import ConfigType

View File

@@ -20,13 +20,13 @@ from homeassistant.const import (
)
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.trigger import (
Trigger,
TriggerActionType,
TriggerConfig,
TriggerInfo,
move_top_level_schema_fields_to_options,
)
from homeassistant.helpers.typing import ConfigType

View File

@@ -1,5 +1,13 @@
"""Helpers for automation."""
from typing import Any
import voluptuous as vol
from homeassistant.const import CONF_OPTIONS
from .typing import ConfigType
def get_absolute_description_key(domain: str, key: str) -> str:
"""Return the absolute description key."""
@@ -19,3 +27,26 @@ def get_relative_description_key(domain: str, key: str) -> str:
if not subtype:
return "_"
return subtype[0]
def move_top_level_schema_fields_to_options(
config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
) -> ConfigType:
"""Move top-level fields to options.
This function is used to help migrate old-style configs to new-style configs.
If options is already present, the config is returned as-is.
"""
if CONF_OPTIONS in config:
return config
config = config.copy()
options = config.setdefault(CONF_OPTIONS, {})
# Move top-level fields to options
for key_marked in options_schema_dict:
key = key_marked.schema
if key in config:
options[key] = config.pop(key)
return config
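A quick before/after of what the new helper does, mirroring the sun-condition migration exercised by the updated tests further down; the move is re-implemented inline here for illustration, and the real helper additionally returns the config unchanged when options is already present.

import voluptuous as vol

options_schema_dict = {
    vol.Optional("before"): str,  # validators are irrelevant for the move
    vol.Optional("after"): str,
    vol.Optional("before_offset"): str,
    vol.Optional("after_offset"): str,
}

old_style = {"condition": "sun", "before": "sunrise", "before_offset": "+1:00:00"}

migrated = dict(old_style)
options = migrated.setdefault("options", {})
for marker in options_schema_dict:
    key = marker.schema
    if key in migrated:
        options[key] = migrated.pop(key)

print(migrated)
# {'condition': 'sun', 'options': {'before': 'sunrise', 'before_offset': '+1:00:00'}}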

View File

@@ -6,6 +6,7 @@ import abc
from collections import deque
from collections.abc import Callable, Container, Coroutine, Generator, Iterable
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime, time as dt_time, timedelta
import functools as ft
import inspect
@@ -30,8 +31,10 @@ from homeassistant.const import (
CONF_FOR,
CONF_ID,
CONF_MATCH,
CONF_OPTIONS,
CONF_SELECTOR,
CONF_STATE,
CONF_TARGET,
CONF_VALUE_TEMPLATE,
CONF_WEEKDAY,
ENTITY_MATCH_ALL,
@@ -111,17 +114,17 @@ CONDITIONS: HassKey[dict[str, str]] = HassKey("conditions")
# Basic schemas to sanity check the condition descriptions,
# full validation is done by hassfest.conditions
_FIELD_SCHEMA = vol.Schema(
_FIELD_DESCRIPTION_SCHEMA = vol.Schema(
{
vol.Optional(CONF_SELECTOR): selector.validate_selector,
},
extra=vol.ALLOW_EXTRA,
)
_CONDITION_SCHEMA = vol.Schema(
_CONDITION_DESCRIPTION_SCHEMA = vol.Schema(
{
vol.Optional("target"): TargetSelector.CONFIG_SCHEMA,
vol.Optional("fields"): vol.Schema({str: _FIELD_SCHEMA}),
vol.Optional("fields"): vol.Schema({str: _FIELD_DESCRIPTION_SCHEMA}),
},
extra=vol.ALLOW_EXTRA,
)
@@ -134,10 +137,10 @@ def starts_with_dot(key: str) -> str:
return key
_CONDITIONS_SCHEMA = vol.Schema(
_CONDITIONS_DESCRIPTION_SCHEMA = vol.Schema(
{
vol.Remove(vol.All(str, starts_with_dot)): object,
cv.underscore_slug: vol.Any(None, _CONDITION_SCHEMA),
cv.underscore_slug: vol.Any(None, _CONDITION_DESCRIPTION_SCHEMA),
}
)
@@ -199,11 +202,43 @@ async def _register_condition_platform(
_LOGGER.exception("Error while notifying condition platform listener")
_CONDITION_SCHEMA = vol.Schema(
{
**cv.CONDITION_BASE_SCHEMA,
vol.Required(CONF_CONDITION): str,
vol.Optional(CONF_OPTIONS): object,
vol.Optional(CONF_TARGET): cv.TARGET_FIELDS,
}
)
class Condition(abc.ABC):
"""Condition class."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config.
The complete config includes fields that are generic to all conditions,
such as the alias.
This method should be overridden by conditions that need to migrate
from the old-style config.
"""
complete_config = _CONDITION_SCHEMA(complete_config)
specific_config: ConfigType = {}
for key in (CONF_OPTIONS, CONF_TARGET):
if key in complete_config:
specific_config[key] = complete_config.pop(key)
specific_config = await cls.async_validate_config(hass, specific_config)
for key in (CONF_OPTIONS, CONF_TARGET):
if key in specific_config:
complete_config[key] = specific_config[key]
return complete_config
@classmethod
@abc.abstractmethod
@@ -212,6 +247,9 @@ class Condition(abc.ABC):
) -> ConfigType:
"""Validate config."""
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
@abc.abstractmethod
async def async_get_checker(self) -> ConditionCheckerType:
"""Get the condition checker."""
@@ -226,6 +264,14 @@ class ConditionProtocol(Protocol):
"""Return the conditions provided by this integration."""
@dataclass(slots=True)
class ConditionConfig:
"""Condition config."""
options: dict[str, Any] | None = None
target: dict[str, Any] | None = None
type ConditionCheckerType = Callable[[HomeAssistant, TemplateVarsType], bool | None]
@@ -355,8 +401,15 @@ async def async_from_config(
relative_condition_key = get_relative_description_key(
platform_domain, condition_key
)
condition_instance = condition_descriptors[relative_condition_key](hass, config)
return await condition_instance.async_get_checker()
condition_cls = condition_descriptors[relative_condition_key]
condition = condition_cls(
hass,
ConditionConfig(
options=config.get(CONF_OPTIONS),
target=config.get(CONF_TARGET),
),
)
return await condition.async_get_checker()
for fmt in (ASYNC_FROM_CONFIG_FORMAT, FROM_CONFIG_FORMAT):
factory = getattr(sys.modules[__name__], fmt.format(condition_key), None)
@@ -989,9 +1042,9 @@ async def async_validate_condition_config(
)
if not (condition_class := condition_descriptors.get(relative_condition_key)):
raise vol.Invalid(f"Invalid condition '{condition_key}' specified")
return await condition_class.async_validate_config(hass, config)
return await condition_class.async_validate_complete_config(hass, config)
if platform is None and condition_key in ("numeric_state", "state"):
if condition_key in ("numeric_state", "state"):
validator = cast(
Callable[[HomeAssistant, ConfigType], ConfigType],
getattr(
@@ -1111,7 +1164,7 @@ def _load_conditions_file(integration: Integration) -> dict[str, Any]:
try:
return cast(
dict[str, Any],
_CONDITIONS_SCHEMA(
_CONDITIONS_DESCRIPTION_SCHEMA(
load_yaml_dict(str(integration.file_path / "conditions.yaml"))
),
)
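Condensed view of the new condition plumbing above: async_validate_complete_config validates the generic wrapper, hands only options/target to the class-specific validator, and async_from_config then constructs a ConditionConfig for the condition instead of passing the raw dict. A sketch with illustrative values (DemoConditionConfig mirrors the dataclass added above):

from dataclasses import dataclass
from typing import Any


@dataclass(slots=True)
class DemoConditionConfig:
    options: dict[str, Any] | None = None
    target: dict[str, Any] | None = None


validated = {
    "condition": "sun",
    "alias": "Only after sunset",    # generic field, stays at the top level
    "options": {"after": "sunset"},  # condition-specific, validated by the class
}

config = DemoConditionConfig(
    options=validated.get("options"),
    target=validated.get("target"),
)
print(config)  # DemoConditionConfig(options={'after': 'sunset'}, target=None)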

View File

@@ -1545,9 +1545,6 @@ STATE_CONDITION_BASE_SCHEMA = {
),
vol.Optional(CONF_ATTRIBUTE): str,
vol.Optional(CONF_FOR): positive_time_period_template,
# To support use_trigger_value in automation
# Deprecated 2016/04/25
vol.Optional("from"): str,
}
STATE_CONDITION_STATE_SCHEMA = vol.Schema(

View File

@@ -401,29 +401,6 @@ class PluggableAction:
await task
def move_top_level_schema_fields_to_options(
config: ConfigType, options_schema_dict: dict[vol.Marker, Any]
) -> ConfigType:
"""Move top-level fields to options.
This function is used to help migrating old-style configs to new-style configs.
If options is already present, the config is returned as-is.
"""
if CONF_OPTIONS in config:
return config
config = config.copy()
options = config.setdefault(CONF_OPTIONS, {})
# Move top-level fields to options
for key_marked in options_schema_dict:
key = key_marked.schema
if key in config:
options[key] = config.pop(key)
return config
async def _async_get_trigger_platform(
hass: HomeAssistant, trigger_key: str
) -> tuple[str, TriggerProtocol]:

View File

@@ -36,10 +36,10 @@ fnv-hash-fast==1.5.0
go2rtc-client==0.2.1
ha-ffmpeg==3.2.2
habluetooth==5.6.4
hass-nabucasa==1.1.1
hass-nabucasa==1.1.2
hassil==3.2.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20250924.0
home-assistant-frontend==20250925.1
home-assistant-intents==2025.9.24
httpx==0.28.1
ifaddr==0.2.0

View File

@@ -47,7 +47,7 @@ dependencies = [
"fnv-hash-fast==1.5.0",
# hass-nabucasa is imported by helpers which don't depend on the cloud
# integration
"hass-nabucasa==1.1.1",
"hass-nabucasa==1.1.2",
# When bumping httpx, please check the version pins of
# httpcore, anyio, and h11 in gen_requirements_all
"httpx==0.28.1",

requirements.txt generated
View File

@@ -22,7 +22,7 @@ certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.6
fnv-hash-fast==1.5.0
hass-nabucasa==1.1.1
hass-nabucasa==1.1.2
httpx==0.28.1
home-assistant-bluetooth==1.13.1
ifaddr==0.2.0

requirements_all.txt generated
View File

@@ -131,7 +131,7 @@ TwitterAPI==2.7.12
WSDiscovery==2.1.2
# homeassistant.components.accuweather
accuweather==4.2.1
accuweather==4.2.2
# homeassistant.components.adax
adax==0.4.0
@@ -185,7 +185,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.1
# homeassistant.components.alexa_devices
aioamazondevices==6.0.0
aioamazondevices==6.2.6
# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==41.9.4
aioesphomeapi==41.10.0
# homeassistant.components.flo
aioflo==2021.11.0
@@ -268,7 +268,7 @@ aioharmony==0.5.3
aiohasupervisor==0.3.3b0
# homeassistant.components.home_connect
aiohomeconnect==0.19.0
aiohomeconnect==0.20.0
# homeassistant.components.homekit_controller
aiohomekit==3.2.18
@@ -375,7 +375,7 @@ aioridwell==2025.09.0
aioruckus==0.42
# homeassistant.components.russound_rio
aiorussound==4.8.1
aiorussound==4.8.2
# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -1145,7 +1145,7 @@ habiticalib==0.4.5
habluetooth==5.6.4
# homeassistant.components.cloud
hass-nabucasa==1.1.1
hass-nabucasa==1.1.2
# homeassistant.components.splunk
hass-splunk==0.1.1
@@ -1186,7 +1186,7 @@ hole==0.9.0
holidays==0.81
# homeassistant.components.frontend
home-assistant-frontend==20250924.0
home-assistant-frontend==20250925.1
# homeassistant.components.conversation
home-assistant-intents==2025.9.24
@@ -1364,7 +1364,7 @@ libpyfoscamcgi==0.0.7
libpyvivotek==0.4.0
# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.3.1
librehardwaremonitor-api==1.4.0
# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -1499,6 +1499,9 @@ mutagen==1.47.0
# homeassistant.components.mutesync
mutesync==0.0.1
# homeassistant.components.mvglive
mvg==1.4.0
# homeassistant.components.permobil
mypermobil==0.1.8
@@ -2381,7 +2384,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.2
# homeassistant.components.smartthings
pysmartthings==3.2.9
pysmartthings==3.3.0
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -3232,7 +3235,7 @@ zeroconf==0.147.2
zeversolar==0.3.2
# homeassistant.components.zha
zha==0.0.72
zha==0.0.73
# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.13

View File

@@ -119,7 +119,7 @@ Tami4EdgeAPI==3.0
WSDiscovery==2.1.2
# homeassistant.components.accuweather
accuweather==4.2.1
accuweather==4.2.2
# homeassistant.components.adax
adax==0.4.0
@@ -173,7 +173,7 @@ aioairzone-cloud==0.7.2
aioairzone==1.0.1
# homeassistant.components.alexa_devices
aioamazondevices==6.0.0
aioamazondevices==6.2.6
# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==41.9.4
aioesphomeapi==41.10.0
# homeassistant.components.flo
aioflo==2021.11.0
@@ -253,7 +253,7 @@ aioharmony==0.5.3
aiohasupervisor==0.3.3b0
# homeassistant.components.home_connect
aiohomeconnect==0.19.0
aiohomeconnect==0.20.0
# homeassistant.components.homekit_controller
aiohomekit==3.2.18
@@ -357,7 +357,7 @@ aioridwell==2025.09.0
aioruckus==0.42
# homeassistant.components.russound_rio
aiorussound==4.8.1
aiorussound==4.8.2
# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -1006,7 +1006,7 @@ habiticalib==0.4.5
habluetooth==5.6.4
# homeassistant.components.cloud
hass-nabucasa==1.1.1
hass-nabucasa==1.1.2
# homeassistant.components.assist_satellite
# homeassistant.components.conversation
@@ -1035,7 +1035,7 @@ hole==0.9.0
holidays==0.81
# homeassistant.components.frontend
home-assistant-frontend==20250924.0
home-assistant-frontend==20250925.1
# homeassistant.components.conversation
home-assistant-intents==2025.9.24
@@ -1180,7 +1180,7 @@ letpot==0.6.2
libpyfoscamcgi==0.0.7
# homeassistant.components.libre_hardware_monitor
librehardwaremonitor-api==1.3.1
librehardwaremonitor-api==1.4.0
# homeassistant.components.mikrotik
librouteros==3.2.0
@@ -1987,7 +1987,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.2
# homeassistant.components.smartthings
pysmartthings==3.2.9
pysmartthings==3.3.0
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -2682,7 +2682,7 @@ zeroconf==0.147.2
zeversolar==0.3.2
# homeassistant.components.zha
zha==0.0.72
zha==0.0.73
# homeassistant.components.zwave_js
zwave-js-server-python==0.67.1

View File

@@ -18,15 +18,13 @@ TEST_DEVICE_1 = AmazonDevice(
online=True,
serial_number=TEST_DEVICE_1_SN,
software_version="echo_test_software_version",
do_not_disturb=False,
response_style=None,
bluetooth_state=True,
entity_id="11111111-2222-3333-4444-555555555555",
appliance_id="G1234567890123456789012345678A",
endpoint_id="G1234567890123456789012345678A",
sensors={
"dnd": AmazonDeviceSensor(name="dnd", value=False, error=False, scale=None),
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", scale="CELSIUS"
)
name="temperature", value="22.5", error=False, scale="CELSIUS"
),
},
)
@@ -42,14 +40,11 @@ TEST_DEVICE_2 = AmazonDevice(
online=True,
serial_number=TEST_DEVICE_2_SN,
software_version="echo_test_2_software_version",
do_not_disturb=False,
response_style=None,
bluetooth_state=True,
entity_id="11111111-2222-3333-4444-555555555555",
appliance_id="G1234567890123456789012345678A",
endpoint_id="G1234567890123456789012345678A",
sensors={
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", scale="CELSIUS"
name="temperature", value="22.5", error=False, scale="CELSIUS"
)
},
)

View File

@@ -1,52 +1,4 @@
# serializer version: 1
# name: test_all_entities[binary_sensor.echo_test_bluetooth-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'binary_sensor.echo_test_bluetooth',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Bluetooth',
'platform': 'alexa_devices',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'bluetooth',
'unique_id': 'echo_test_serial_number-bluetooth',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[binary_sensor.echo_test_bluetooth-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Echo Test Bluetooth',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.echo_test_bluetooth',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_all_entities[binary_sensor.echo_test_connectivity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -2,7 +2,6 @@
# name: test_device_diagnostics
dict({
'account name': 'Echo Test',
'bluetooth state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -12,9 +11,17 @@
]),
'device family': 'mine',
'device type': 'echo',
'do not disturb': False,
'online': True,
'response style': None,
'sensors': dict({
'dnd': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, scale=None)",
}),
'temperature': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, scale='CELSIUS')",
}),
}),
'serial number': 'echo_test_serial_number',
'software version': 'echo_test_software_version',
})
@@ -25,7 +32,6 @@
'devices': list([
dict({
'account name': 'Echo Test',
'bluetooth state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -35,9 +41,17 @@
]),
'device family': 'mine',
'device type': 'echo',
'do not disturb': False,
'online': True,
'response style': None,
'sensors': dict({
'dnd': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='dnd', value=False, error=False, scale=None)",
}),
'temperature': dict({
'__type': "<class 'aioamazondevices.api.AmazonDeviceSensor'>",
'repr': "AmazonDeviceSensor(name='temperature', value='22.5', error=False, scale='CELSIUS')",
}),
}),
'serial number': 'echo_test_serial_number',
'software version': 'echo_test_software_version',
}),

View File

@@ -4,8 +4,6 @@
tuple(
dict({
'account_name': 'Echo Test',
'appliance_id': 'G1234567890123456789012345678A',
'bluetooth_state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -16,12 +14,18 @@
'device_family': 'mine',
'device_owner_customer_id': 'amazon_ower_id',
'device_type': 'echo',
'do_not_disturb': False,
'endpoint_id': 'G1234567890123456789012345678A',
'entity_id': '11111111-2222-3333-4444-555555555555',
'online': True,
'response_style': None,
'sensors': dict({
'dnd': dict({
'error': False,
'name': 'dnd',
'scale': None,
'value': False,
}),
'temperature': dict({
'error': False,
'name': 'temperature',
'scale': 'CELSIUS',
'value': '22.5',
@@ -41,8 +45,6 @@
tuple(
dict({
'account_name': 'Echo Test',
'appliance_id': 'G1234567890123456789012345678A',
'bluetooth_state': True,
'capabilities': list([
'AUDIO_PLAYER',
'MICROPHONE',
@@ -53,12 +55,18 @@
'device_family': 'mine',
'device_owner_customer_id': 'amazon_ower_id',
'device_type': 'echo',
'do_not_disturb': False,
'endpoint_id': 'G1234567890123456789012345678A',
'entity_id': '11111111-2222-3333-4444-555555555555',
'online': True,
'response_style': None,
'sensors': dict({
'dnd': dict({
'error': False,
'name': 'dnd',
'scale': None,
'value': False,
}),
'temperature': dict({
'error': False,
'name': 'temperature',
'scale': 'CELSIUS',
'value': '22.5',

View File

@@ -30,7 +30,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'do_not_disturb',
'unique_id': 'echo_test_serial_number-do_not_disturb',
'unique_id': 'echo_test_serial_number-dnd',
'unit_of_measurement': None,
})
# ---

View File

@@ -123,6 +123,8 @@ async def test_dynamic_device(
assert (state := hass.states.get(entity_id_1))
assert state.state == STATE_ON
assert not hass.states.get(entity_id_2)
mock_amazon_devices_client.get_devices_data.return_value = {
TEST_DEVICE_1_SN: TEST_DEVICE_1,
TEST_DEVICE_2_SN: TEST_DEVICE_2,

View File

@@ -134,10 +134,38 @@ async def test_unit_of_measurement(
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].sensors = {sensor: AmazonDeviceSensor(name=sensor, value=api_value, scale=scale)}
].sensors = {
sensor: AmazonDeviceSensor(
name=sensor, value=api_value, error=False, scale=scale
)
}
await setup_integration(hass, mock_config_entry)
assert (state := hass.states.get(entity_id))
assert state.state == state_value
assert state.attributes["unit_of_measurement"] == unit
async def test_sensor_unavailable(
hass: HomeAssistant,
freezer: FrozenDateTimeFactory,
mock_amazon_devices_client: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test sensor is unavailable."""
entity_id = "sensor.echo_test_illuminance"
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].sensors = {
"illuminance": AmazonDeviceSensor(
name="illuminance", value="800", error=True, scale=None
)
}
await setup_integration(hass, mock_config_entry)
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE

View File

@@ -1,7 +1,9 @@
"""Tests for the Alexa Devices switch platform."""
from copy import deepcopy
from unittest.mock import AsyncMock, patch
from aioamazondevices.api import AmazonDeviceSensor
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -23,10 +25,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from .conftest import TEST_DEVICE_1_SN
from .conftest import TEST_DEVICE_1, TEST_DEVICE_1_SN
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
ENTITY_ID = "switch.echo_test_do_not_disturb"
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_entities(
@@ -52,48 +56,59 @@ async def test_switch_dnd(
"""Test switching DND."""
await setup_integration(hass, mock_config_entry)
entity_id = "switch.echo_test_do_not_disturb"
assert (state := hass.states.get(entity_id))
assert (state := hass.states.get(ENTITY_ID))
assert state.state == STATE_OFF
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity_id},
{ATTR_ENTITY_ID: ENTITY_ID},
blocking=True,
)
assert mock_amazon_devices_client.set_do_not_disturb.call_count == 1
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].do_not_disturb = True
device_data = deepcopy(TEST_DEVICE_1)
device_data.sensors = {
"dnd": AmazonDeviceSensor(name="dnd", value=True, error=False, scale=None),
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", error=False, scale="CELSIUS"
),
}
mock_amazon_devices_client.get_devices_data.return_value = {
TEST_DEVICE_1_SN: device_data
}
freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert (state := hass.states.get(entity_id))
assert (state := hass.states.get(ENTITY_ID))
assert state.state == STATE_ON
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: entity_id},
{ATTR_ENTITY_ID: ENTITY_ID},
blocking=True,
)
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].do_not_disturb = False
device_data.sensors = {
"dnd": AmazonDeviceSensor(name="dnd", value=False, error=False, scale=None),
"temperature": AmazonDeviceSensor(
name="temperature", value="22.5", error=False, scale="CELSIUS"
),
}
mock_amazon_devices_client.get_devices_data.return_value = {
TEST_DEVICE_1_SN: device_data
}
freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert mock_amazon_devices_client.set_do_not_disturb.call_count == 2
assert (state := hass.states.get(entity_id))
assert (state := hass.states.get(ENTITY_ID))
assert state.state == STATE_OFF
@@ -104,16 +119,13 @@ async def test_offline_device(
mock_config_entry: MockConfigEntry,
) -> None:
"""Test offline device handling."""
entity_id = "switch.echo_test_do_not_disturb"
mock_amazon_devices_client.get_devices_data.return_value[
TEST_DEVICE_1_SN
].online = False
await setup_integration(hass, mock_config_entry)
assert (state := hass.states.get(entity_id))
assert (state := hass.states.get(ENTITY_ID))
assert state.state == STATE_UNAVAILABLE
mock_amazon_devices_client.get_devices_data.return_value[
@@ -124,5 +136,5 @@ async def test_offline_device(
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert (state := hass.states.get(entity_id))
assert (state := hass.states.get(ENTITY_ID))
assert state.state != STATE_UNAVAILABLE

View File

@@ -10,8 +10,10 @@ from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TUR
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from . import setup_integration
from .const import TEST_DEVICE_1_SN
from tests.common import MockConfigEntry
@@ -54,3 +56,41 @@ async def test_alexa_api_call_exceptions(
assert exc_info.value.translation_domain == DOMAIN
assert exc_info.value.translation_key == key
assert exc_info.value.translation_placeholders == {"error": error}
async def test_alexa_unique_id_migration(
hass: HomeAssistant,
mock_amazon_devices_client: AsyncMock,
mock_config_entry: MockConfigEntry,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test unique_id migration."""
mock_config_entry.add_to_hass(hass)
device = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
identifiers={(DOMAIN, mock_config_entry.entry_id)},
name=mock_config_entry.title,
manufacturer="Amazon",
model="Echo Dot",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity = entity_registry.async_get_or_create(
SWITCH_DOMAIN,
DOMAIN,
unique_id=f"{TEST_DEVICE_1_SN}-do_not_disturb",
device_id=device.id,
config_entry=mock_config_entry,
has_entity_name=True,
)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
migrated_entity = entity_registry.async_get(entity.entity_id)
assert migrated_entity is not None
assert migrated_entity.config_entry_id == mock_config_entry.entry_id
assert migrated_entity.unique_id == f"{TEST_DEVICE_1_SN}-dnd"
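The test above only asserts the outcome: the registry entry moves from ...-do_not_disturb to ...-dnd. A hedged sketch of how such a unique_id migration is commonly wired up in Home Assistant follows; whether alexa_devices uses exactly this helper is not shown in this diff.

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er


async def _async_migrate_dnd_unique_ids(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Rename ...-do_not_disturb unique IDs to ...-dnd (illustrative)."""

    def _migrate(reg_entry: er.RegistryEntry) -> dict[str, str] | None:
        old_suffix = "-do_not_disturb"
        if reg_entry.unique_id.endswith(old_suffix):
            new_unique_id = reg_entry.unique_id.removesuffix(old_suffix) + "-dnd"
            return {"new_unique_id": new_unique_id}
        return None  # leave other entities untouched

    # Walks every registry entry owned by this config entry and applies updates
    await er.async_migrate_entries(hass, entry.entry_id, _migrate)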

View File

@@ -20,13 +20,14 @@ from aiocomelit.const import (
BRIDGE_HOST = "fake_bridge_host"
BRIDGE_PORT = 80
BRIDGE_PIN = 1234
BRIDGE_PIN = "1234"
VEDO_HOST = "fake_vedo_host"
VEDO_PORT = 8080
VEDO_PIN = 5678
VEDO_PIN = "5678"
FAKE_PIN = 0000
FAKE_PIN = "0000"
BAD_PIN = "abcd"
LIGHT0 = ComelitSerialBridgeObject(
index=0,

View File

@@ -10,9 +10,10 @@ from homeassistant.components.comelit.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.data_entry_flow import FlowResultType, InvalidData
from .const import (
BAD_PIN,
BRIDGE_HOST,
BRIDGE_PIN,
BRIDGE_PORT,
@@ -310,3 +311,46 @@ async def test_reconfigure_fails(
CONF_PIN: BRIDGE_PIN,
CONF_TYPE: BRIDGE,
}
async def test_pin_format_serial_bridge(
hass: HomeAssistant,
mock_serial_bridge: AsyncMock,
mock_serial_bridge_config_entry: MockConfigEntry,
) -> None:
"""Test PIN is valid format."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
with pytest.raises(InvalidData):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: BRIDGE_HOST,
CONF_PORT: BRIDGE_PORT,
CONF_PIN: BAD_PIN,
},
)
assert result["type"] is FlowResultType.FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: BRIDGE_HOST,
CONF_PORT: BRIDGE_PORT,
CONF_PIN: BRIDGE_PIN,
},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["data"] == {
CONF_HOST: BRIDGE_HOST,
CONF_PORT: BRIDGE_PORT,
CONF_PIN: BRIDGE_PIN,
CONF_TYPE: BRIDGE,
}
assert not result["result"].unique_id
await hass.async_block_till_done()
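A hedged sketch of a PIN validator that would produce the InvalidData behaviour the new test expects, keeping the PIN as a string so leading zeros survive; the actual schema used by the Comelit config flow is not part of this diff.

import voluptuous as vol


def numeric_pin(value: str) -> str:
    """Accept the PIN as a string of digits only (leading zeros preserved)."""
    value = str(value)
    if not value.isdigit():
        raise vol.Invalid("PIN must contain digits only")
    return value


PIN_SCHEMA = vol.Schema({vol.Required("pin"): numeric_pin})

print(PIN_SCHEMA({"pin": "0000"}))  # {'pin': '0000'} - leading zeros survive
try:
    PIN_SCHEMA({"pin": "abcd"})     # the BAD_PIN case above
except vol.Invalid as err:
    print(f"rejected: {err}")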

View File

@@ -30,7 +30,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_dd19facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unique_id': 'portainer_test_entry_123_focused_einstein_status',
'unit_of_measurement': None,
})
# ---
@@ -79,7 +79,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_aa86eacfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unique_id': 'portainer_test_entry_123_funny_chatelet_status',
'unit_of_measurement': None,
})
# ---
@@ -177,7 +177,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_ee20facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unique_id': 'portainer_test_entry_123_practical_morse_status',
'unit_of_measurement': None,
})
# ---
@@ -226,7 +226,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_bb97facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unique_id': 'portainer_test_entry_123_serene_banach_status',
'unit_of_measurement': None,
})
# ---
@@ -275,7 +275,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'status',
'unique_id': 'portainer_test_entry_123_cc08facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf_status',
'unique_id': 'portainer_test_entry_123_stoic_turing_status',
'unit_of_measurement': None,
})
# ---

View File

@@ -5,6 +5,7 @@ from datetime import timedelta
from unittest.mock import patch
import pytest
from roborock import MultiMapsList
from roborock.exceptions import RoborockException
from vacuum_map_parser_base.config.color import SupportedColor
@@ -135,3 +136,30 @@ async def test_dynamic_local_scan_interval(
async_fire_time_changed(hass, dt_util.utcnow() + interval)
assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "20"
async def test_no_maps(
hass: HomeAssistant,
mock_roborock_entry: MockConfigEntry,
bypass_api_fixture: None,
) -> None:
"""Test that a device with no maps is handled correctly."""
prop = copy.deepcopy(PROP)
prop.status.map_status = 252
with (
patch(
"homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop",
return_value=prop,
),
patch(
"homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_multi_maps_list",
return_value=MultiMapsList(
max_multi_map=1, max_bak_map=1, multi_map_count=0, map_info=[]
),
),
patch(
"homeassistant.components.roborock.RoborockMqttClientV1.load_multi_map"
) as load_map,
):
await hass.config_entries.async_setup(mock_roborock_entry.entry_id)
assert load_map.call_count == 0

View File

@@ -83,7 +83,10 @@ async def test_if_action_before_sunrise_no_offset(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "sun", "before": SUN_EVENT_SUNRISE},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNRISE},
},
"action": {"service": "test.automation"},
}
},
@@ -156,7 +159,10 @@ async def test_if_action_after_sunrise_no_offset(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "sun", "after": SUN_EVENT_SUNRISE},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNRISE},
},
"action": {"service": "test.automation"},
}
},
@@ -231,8 +237,10 @@ async def test_if_action_before_sunrise_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"before": SUN_EVENT_SUNRISE,
"before_offset": "+1:00:00",
"options": {
"before": SUN_EVENT_SUNRISE,
"before_offset": "+1:00:00",
},
},
"action": {"service": "test.automation"},
}
@@ -356,8 +364,7 @@ async def test_if_action_before_sunset_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"before": "sunset",
"before_offset": "+1:00:00",
"options": {"before": "sunset", "before_offset": "+1:00:00"},
},
"action": {"service": "test.automation"},
}
@@ -481,8 +488,7 @@ async def test_if_action_after_sunrise_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"after": SUN_EVENT_SUNRISE,
"after_offset": "+1:00:00",
"options": {"after": SUN_EVENT_SUNRISE, "after_offset": "+1:00:00"},
},
"action": {"service": "test.automation"},
}
@@ -630,8 +636,7 @@ async def test_if_action_after_sunset_with_offset(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"after": "sunset",
"after_offset": "+1:00:00",
"options": {"after": "sunset", "after_offset": "+1:00:00"},
},
"action": {"service": "test.automation"},
}
@@ -707,8 +712,7 @@ async def test_if_action_after_and_before_during(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"after": SUN_EVENT_SUNRISE,
"before": SUN_EVENT_SUNSET,
"options": {"after": SUN_EVENT_SUNRISE, "before": SUN_EVENT_SUNSET},
},
"action": {"service": "test.automation"},
}
@@ -812,8 +816,7 @@ async def test_if_action_before_or_after_during(
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {
"condition": "sun",
"before": SUN_EVENT_SUNRISE,
"after": SUN_EVENT_SUNSET,
"options": {"before": SUN_EVENT_SUNRISE, "after": SUN_EVENT_SUNSET},
},
"action": {"service": "test.automation"},
}
@@ -941,7 +944,10 @@ async def test_if_action_before_sunrise_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "sun", "before": SUN_EVENT_SUNRISE},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNRISE},
},
"action": {"service": "test.automation"},
}
},
@@ -1020,7 +1026,10 @@ async def test_if_action_after_sunrise_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "sun", "after": SUN_EVENT_SUNRISE},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNRISE},
},
"action": {"service": "test.automation"},
}
},
@@ -1099,7 +1108,10 @@ async def test_if_action_before_sunset_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "sun", "before": SUN_EVENT_SUNSET},
"condition": {
"condition": "sun",
"options": {"before": SUN_EVENT_SUNSET},
},
"action": {"service": "test.automation"},
}
},
@@ -1178,7 +1190,10 @@ async def test_if_action_after_sunset_no_offset_kotzebue(
automation.DOMAIN: {
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "sun", "after": SUN_EVENT_SUNSET},
"condition": {
"condition": "sun",
"options": {"after": SUN_EVENT_SUNSET},
},
"action": {"service": "test.automation"},
}
},

View File

@@ -62,9 +62,15 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
"""Test function with actual service call events in database."""
user_id = str(uuid.uuid4())
hass.states.async_set("light.living_room", "off")
hass.states.async_set("light.kitchen", "off")
hass.states.async_set("climate.thermostat", "off")
hass.states.async_set("light.bedroom", "off")
hass.states.async_set("lock.front_door", "locked")
# Create service call events at different times of day
# Morning events - use separate service calls to get around context deduplication
with freeze_time("2023-07-01 07:00:00+00:00"): # Morning
with freeze_time("2023-07-01 07:00:00"): # Morning
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -77,7 +83,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await hass.async_block_till_done()
# Afternoon events
with freeze_time("2023-07-01 14:00:00+00:00"): # Afternoon
with freeze_time("2023-07-01 14:00:00"): # Afternoon
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -90,7 +96,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await hass.async_block_till_done()
# Evening events
with freeze_time("2023-07-01 19:00:00+00:00"): # Evening
with freeze_time("2023-07-01 19:00:00"): # Evening
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -103,7 +109,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await hass.async_block_till_done()
# Night events
with freeze_time("2023-07-01 23:00:00+00:00"): # Night
with freeze_time("2023-07-01 23:00:00"): # Night
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -119,7 +125,7 @@ async def test_with_service_calls(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)
# Get predictions - make sure we're still in a reasonable timeframe
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
results = await async_predict_common_control(hass, user_id)
# Verify results contain the expected entities in the correct time periods
@@ -151,7 +157,12 @@ async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None:
suggested_object_id="kitchen",
)
with freeze_time("2023-07-01 10:00:00+00:00"): # Morning
hass.states.async_set("light.living_room", "off")
hass.states.async_set("light.kitchen", "off")
hass.states.async_set("light.hallway", "off")
hass.states.async_set("not_allowed.domain", "off")
with freeze_time("2023-07-01 10:00:00"): # Morning
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -163,6 +174,7 @@ async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None:
"light.kitchen",
"light.hallway",
"not_allowed.domain",
"light.not_in_state_machine",
]
},
},
@@ -172,7 +184,7 @@ async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
results = await async_predict_common_control(hass, user_id)
# Two lights should be counted (10:00 UTC = 02:00 local = night)
@@ -189,7 +201,10 @@ async def test_context_deduplication(hass: HomeAssistant) -> None:
user_id = str(uuid.uuid4())
context = Context(user_id=user_id)
with freeze_time("2023-07-01 10:00:00+00:00"): # Morning
hass.states.async_set("light.living_room", "off")
hass.states.async_set("switch.coffee_maker", "off")
with freeze_time("2023-07-01 10:00:00"): # Morning
# Fire multiple events with the same context
hass.bus.async_fire(
EVENT_CALL_SERVICE,
@@ -215,7 +230,7 @@ async def test_context_deduplication(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
results = await async_predict_common_control(hass, user_id)
# Only the first event should be processed (10:00 UTC = 02:00 local = night)
@@ -232,8 +247,11 @@ async def test_old_events_excluded(hass: HomeAssistant) -> None:
"""Test that events older than 30 days are excluded."""
user_id = str(uuid.uuid4())
hass.states.async_set("light.old_event", "off")
hass.states.async_set("light.recent_event", "off")
# Create an old event (35 days ago)
with freeze_time("2023-05-27 10:00:00+00:00"): # 35 days before July 1st
with freeze_time("2023-05-27 10:00:00"): # 35 days before July 1st
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -246,7 +264,7 @@ async def test_old_events_excluded(hass: HomeAssistant) -> None:
await hass.async_block_till_done()
# Create a recent event (5 days ago)
with freeze_time("2023-06-26 10:00:00+00:00"): # 5 days before July 1st
with freeze_time("2023-06-26 10:00:00"): # 5 days before July 1st
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
@@ -261,7 +279,7 @@ async def test_old_events_excluded(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)
# Query with current time
with freeze_time("2023-07-01 10:00:00+00:00"):
with freeze_time("2023-07-01 10:00:00"):
results = await async_predict_common_control(hass, user_id)
# Only recent event should be included (10:00 UTC = 02:00 local = night)
@@ -278,8 +296,16 @@ async def test_entities_limit(hass: HomeAssistant) -> None:
"""Test that only top entities are returned per time category."""
user_id = str(uuid.uuid4())
hass.states.async_set("light.most_used", "off")
hass.states.async_set("light.second", "off")
hass.states.async_set("light.third", "off")
hass.states.async_set("light.fourth", "off")
hass.states.async_set("light.fifth", "off")
hass.states.async_set("light.sixth", "off")
hass.states.async_set("light.seventh", "off")
# Create more than 5 different entities in morning
with freeze_time("2023-07-01 08:00:00+00:00"):
with freeze_time("2023-07-01 08:00:00"):
# Create entities with different frequencies
entities_with_counts = [
("light.most_used", 10),
@@ -308,7 +334,7 @@ async def test_entities_limit(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)
with (
freeze_time("2023-07-02 10:00:00+00:00"),
freeze_time("2023-07-02 10:00:00"),
patch(
"homeassistant.components.usage_prediction.common_control.RESULTS_TO_INCLUDE",
5,
@@ -335,7 +361,10 @@ async def test_different_users_separated(hass: HomeAssistant) -> None:
user_id_1 = str(uuid.uuid4())
user_id_2 = str(uuid.uuid4())
with freeze_time("2023-07-01 10:00:00+00:00"):
hass.states.async_set("light.user1_light", "off")
hass.states.async_set("light.user2_light", "off")
with freeze_time("2023-07-01 10:00:00"):
# User 1 events
hass.bus.async_fire(
EVENT_CALL_SERVICE,
@@ -363,7 +392,7 @@ async def test_different_users_separated(hass: HomeAssistant) -> None:
await async_wait_recording_done(hass)
# Get results for each user
with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent
with freeze_time("2023-07-02 10:00:00"): # Next day, so events are recent
results_user1 = await async_predict_common_control(hass, user_id_1)
results_user2 = await async_predict_common_control(hass, user_id_2)
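Note: the updated tests above now seed the state machine with hass.states.async_set() before firing service-call events, and drop the explicit "+00:00" suffix from freeze_time(). A minimal sketch of the shared event-firing pattern, assuming the same imports the test module already uses (freeze_time from freezegun, Context, EVENT_CALL_SERVICE); the fire_turn_on helper is illustrative only, not part of the diff:

    from freezegun import freeze_time

    from homeassistant.const import EVENT_CALL_SERVICE
    from homeassistant.core import Context, HomeAssistant


    async def fire_turn_on(
        hass: HomeAssistant, user_id: str, entity_ids: list[str], when: str
    ) -> None:
        """Fire a light.turn_on service-call event at a frozen point in time."""
        with freeze_time(when):
            hass.bus.async_fire(
                EVENT_CALL_SERVICE,
                {
                    "domain": "light",
                    "service": "turn_on",
                    "service_data": {"entity_id": entity_ids},
                },
                context=Context(user_id=user_id),
            )
            await hass.async_block_till_done()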


@@ -708,8 +708,8 @@ async def test_multiple_zha_entries_aborts(hass: HomeAssistant, mock_app) -> None:
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
async def test_discovery_via_usb_path_does_not_change(hass: HomeAssistant) -> None:
"""Test usb flow already set up and the path does not change."""
async def test_discovery_via_usb_duplicate_unique_id(hass: HomeAssistant) -> None:
"""Test USB discovery when a config entry with a duplicate unique_id already exists."""
entry = MockConfigEntry(
domain=DOMAIN,
@@ -737,13 +737,8 @@ async def test_discovery_via_usb_path_does_not_change(hass: HomeAssistant) -> None:
)
await hass.async_block_till_done()
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert entry.data[CONF_DEVICE] == {
CONF_DEVICE_PATH: "/dev/ttyUSB1",
CONF_BAUDRATE: 115200,
CONF_FLOW_CONTROL: None,
}
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "confirm"
@patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True))
@@ -857,6 +852,40 @@ async def test_discovery_via_usb_zha_ignored_updates(hass: HomeAssistant) -> None:
}
async def test_discovery_via_usb_same_device_already_setup(hass: HomeAssistant) -> None:
"""Test discovery aborting if ZHA is already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_DEVICE: {CONF_DEVICE_PATH: "/dev/serial/by-id/usb-device123"}},
).add_to_hass(hass)
# Discovery info with the same device but different path format
discovery_info = UsbServiceInfo(
device="/dev/ttyUSB0",
pid="AAAA",
vid="AAAA",
serial_number="1234",
description="zigbee radio",
manufacturer="test",
)
with patch(
"homeassistant.components.zha.config_flow.usb.get_serial_by_id",
return_value="/dev/serial/by-id/usb-device123",
) as mock_get_serial_by_id:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USB}, data=discovery_info
)
await hass.async_block_till_done()
# Verify get_serial_by_id was called to normalize the path
assert mock_get_serial_by_id.mock_calls == [call("/dev/ttyUSB0")]
# Should abort since it's the same device
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
@patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True))
async def test_legacy_zeroconf_discovery_already_setup(hass: HomeAssistant) -> None:
@@ -890,6 +919,39 @@ async def test_legacy_zeroconf_discovery_already_setup(hass: HomeAssistant) -> None:
assert confirm_result["step_id"] == "choose_migration_strategy"
async def test_zeroconf_discovery_via_socket_already_setup_with_ip_match(
hass: HomeAssistant,
) -> None:
"""Test zeroconf discovery aborting when ZHA is already setup with socket and one IP matches."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_DEVICE: {CONF_DEVICE_PATH: "socket://192.168.1.101:6638"}},
).add_to_hass(hass)
service_info = ZeroconfServiceInfo(
ip_address=ip_address("192.168.1.100"),
ip_addresses=[
ip_address("192.168.1.100"),
ip_address("192.168.1.101"), # Matches config entry
],
hostname="tube-zigbee-gw.local.",
name="mock_name",
port=6638,
properties={"name": "tube_123456"},
type="mock_type",
)
# Discovery should abort due to single instance check
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info
)
await hass.async_block_till_done()
# Should abort since one of the advertised IPs matches existing socket path
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"
@patch(
"homeassistant.components.zha.radio_manager.ZhaRadioManager.detect_radio_type",
mock_detect_radio_type(radio_type=RadioType.deconz),
@@ -2289,34 +2351,28 @@ async def test_config_flow_serial_resolution_oserror(
) -> None:
"""Test that OSError during serial port resolution is handled."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "manual_pick_radio_type"},
data={CONF_RADIO_TYPE: RadioType.ezsp.description},
discovery_info = UsbServiceInfo(
device="/dev/ttyZIGBEE",
pid="AAAA",
vid="AAAA",
serial_number="1234",
description="zigbee radio",
manufacturer="test",
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB33"},
)
assert result["type"] is FlowResultType.MENU
assert result["step_id"] == "choose_setup_strategy"
with (
patch(
"homeassistant.components.usb.get_serial_by_id",
"homeassistant.components.zha.config_flow.usb.get_serial_by_id",
side_effect=OSError("Test error"),
),
):
setup_result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={"next_step_id": config_flow.SETUP_STRATEGY_RECOMMENDED},
result_init = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USB}, data=discovery_info
)
assert setup_result["type"] is FlowResultType.ABORT
assert setup_result["reason"] == "cannot_resolve_path"
assert setup_result["description_placeholders"] == {"path": "/dev/ttyUSB33"}
assert result_init["type"] is FlowResultType.ABORT
assert result_init["reason"] == "cannot_resolve_path"
assert result_init["description_placeholders"] == {"path": "/dev/ttyZIGBEE"}
@patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee")


@@ -12,8 +12,7 @@ async def test_zone_raises(hass: HomeAssistant) -> None:
"""Test that zone raises ConditionError on errors."""
config = {
"condition": "zone",
"entity_id": "device_tracker.cat",
"zone": "zone.home",
"options": {"entity_id": "device_tracker.cat", "zone": "zone.home"},
}
config = cv.CONDITION_SCHEMA(config)
config = await condition.async_validate_condition_config(hass, config)
@@ -66,8 +65,10 @@ async def test_zone_raises(hass: HomeAssistant) -> None:
config = {
"condition": "zone",
"entity_id": ["device_tracker.cat", "device_tracker.dog"],
"zone": ["zone.home", "zone.work"],
"options": {
"entity_id": ["device_tracker.cat", "device_tracker.dog"],
"zone": ["zone.home", "zone.work"],
},
}
config = cv.CONDITION_SCHEMA(config)
config = await condition.async_validate_condition_config(hass, config)
@@ -102,8 +103,10 @@ async def test_zone_multiple_entities(hass: HomeAssistant) -> None:
{
"alias": "Zone Condition",
"condition": "zone",
"entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
"zone": "zone.home",
"options": {
"entity_id": ["device_tracker.person_1", "device_tracker.person_2"],
"zone": "zone.home",
},
},
],
}
@@ -161,8 +164,10 @@ async def test_multiple_zones(hass: HomeAssistant) -> None:
"conditions": [
{
"condition": "zone",
"entity_id": "device_tracker.person",
"zone": ["zone.home", "zone.work"],
"options": {
"entity_id": "device_tracker.person",
"zone": ["zone.home", "zone.work"],
},
},
],
}
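Note: across these zone-condition tests, entity_id and zone move from the top level into an options mapping. For quick reference, the legacy versus migrated shape side by side (illustrative dicts mirroring the cases above):

    legacy_zone_condition = {
        "condition": "zone",
        "entity_id": "device_tracker.person",
        "zone": "zone.home",
    }

    migrated_zone_condition = {
        "condition": "zone",
        "options": {
            "entity_id": "device_tracker.person",
            "zone": "zone.home",
        },
    }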


@@ -1,10 +1,12 @@
"""Test automation helpers."""
import pytest
import voluptuous as vol
from homeassistant.helpers.automation import (
get_absolute_description_key,
get_relative_description_key,
move_top_level_schema_fields_to_options,
)
@@ -34,3 +36,73 @@ def test_relative_description_key(relative_key: str, absolute_key: str) -> None:
"""Test relative description key."""
DOMAIN = "homeassistant"
assert get_relative_description_key(DOMAIN, absolute_key) == relative_key
@pytest.mark.parametrize(
("config", "schema_dict", "expected_config"),
[
(
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
},
{},
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
"options": {},
},
),
(
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
},
{
vol.Required("entity"): str,
vol.Optional("from"): str,
vol.Optional("to"): str,
vol.Optional("for"): dict,
vol.Optional("attribute"): str,
vol.Optional("value_template"): str,
},
{
"platform": "test",
"extra_field": "extra_value",
"options": {
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
},
},
),
],
)
async def test_move_schema_fields_to_options(
config, schema_dict, expected_config
) -> None:
"""Test moving schema fields to options."""
assert (
move_top_level_schema_fields_to_options(config, schema_dict) == expected_config
)
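Note: a minimal usage sketch of move_top_level_schema_fields_to_options as exercised above, with a smaller schema than the parametrized cases; the expected result follows directly from those cases:

    import voluptuous as vol

    from homeassistant.helpers.automation import move_top_level_schema_fields_to_options

    schema_dict = {
        vol.Required("entity"): str,
        vol.Optional("to"): str,
    }
    config = {"platform": "test", "entity": "sensor.test", "to": "closed"}

    migrated = move_top_level_schema_fields_to_options(config, schema_dict)
    # Expected, per the cases above:
    # {"platform": "test", "options": {"entity": "sensor.test", "to": "closed"}}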


@@ -32,6 +32,13 @@ from homeassistant.helpers import (
entity_registry as er,
trace,
)
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
async_validate_condition_config,
)
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import Integration, async_get_integration
@@ -2105,12 +2112,9 @@ async def test_platform_async_get_conditions(hass: HomeAssistant) -> None:
async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
"""Test a condition platform with multiple conditions."""
class MockCondition(condition.Condition):
class MockCondition(Condition):
"""Mock condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
@@ -2118,23 +2122,24 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
"""Validate config."""
return config
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
class MockCondition1(MockCondition):
"""Mock condition 1."""
async def async_get_checker(self) -> condition.ConditionCheckerType:
async def async_get_checker(self) -> ConditionCheckerType:
"""Evaluate state based on configuration."""
return lambda hass, vars: True
class MockCondition2(MockCondition):
"""Mock condition 2."""
async def async_get_checker(self) -> condition.ConditionCheckerType:
async def async_get_checker(self) -> ConditionCheckerType:
"""Evaluate state based on configuration."""
return lambda hass, vars: False
async def async_get_conditions(
hass: HomeAssistant,
) -> dict[str, type[condition.Condition]]:
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
return {
"_": MockCondition1,
"cond_2": MockCondition2,
@@ -2148,12 +2153,12 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
config_1 = {CONF_CONDITION: "test"}
config_2 = {CONF_CONDITION: "test.cond_2"}
config_3 = {CONF_CONDITION: "test.unknown_cond"}
assert await condition.async_validate_condition_config(hass, config_1) == config_1
assert await condition.async_validate_condition_config(hass, config_2) == config_2
assert await async_validate_condition_config(hass, config_1) == config_1
assert await async_validate_condition_config(hass, config_2) == config_2
with pytest.raises(
vol.Invalid, match="Invalid condition 'test.unknown_cond' specified"
):
await condition.async_validate_condition_config(hass, config_3)
await async_validate_condition_config(hass, config_3)
cond_func = await condition.async_from_config(hass, config_1)
assert cond_func(hass, {}) is True
@@ -2165,6 +2170,74 @@ async def test_platform_multiple_conditions(hass: HomeAssistant) -> None:
await condition.async_from_config(hass, config_3)
async def test_platform_migrate_trigger(hass: HomeAssistant) -> None:
"""Test a condition platform with a migration."""
OPTIONS_SCHEMA_DICT = {
vol.Required("option_1"): str,
vol.Optional("option_2"): int,
}
class MockCondition(Condition):
"""Mock condition."""
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = move_top_level_schema_fields_to_options(
complete_config, OPTIONS_SCHEMA_DICT
)
return await super().async_validate_complete_config(hass, complete_config)
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return config
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
return {
"_": MockCondition,
}
mock_integration(hass, MockModule("test"))
mock_platform(
hass, "test.condition", Mock(async_get_conditions=async_get_conditions)
)
config_1 = {
"condition": "test",
"option_1": "value_1",
"option_2": 2,
}
config_2 = {
"condition": "test",
"option_1": "value_1",
}
config_1_migrated = {
"condition": "test",
"options": {"option_1": "value_1", "option_2": 2},
}
config_2_migrated = {
"condition": "test",
"options": {"option_1": "value_1"},
}
assert await async_validate_condition_config(hass, config_1) == config_1_migrated
assert await async_validate_condition_config(hass, config_2) == config_2_migrated
assert (
await async_validate_condition_config(hass, config_1_migrated)
== config_1_migrated
)
assert (
await async_validate_condition_config(hass, config_2_migrated)
== config_2_migrated
)
@pytest.mark.parametrize("enabled_value", [True, "{{ 1 == 1 }}"])
async def test_enabled_condition(
hass: HomeAssistant, enabled_value: bool | str


@@ -19,6 +19,7 @@ from homeassistant.core import (
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import trigger
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.trigger import (
DATA_PLUGGABLE_ACTIONS,
PluggableAction,
@@ -29,7 +30,6 @@ from homeassistant.helpers.trigger import (
_async_get_trigger_platform,
async_initialize_triggers,
async_validate_trigger_config,
move_top_level_schema_fields_to_options,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import Integration, async_get_integration
@@ -449,76 +449,6 @@ async def test_pluggable_action(
assert not plug_2
@pytest.mark.parametrize(
("config", "schema_dict", "expected_config"),
[
(
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
},
{},
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
"options": {},
},
),
(
{
"platform": "test",
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
"extra_field": "extra_value",
},
{
vol.Required("entity"): str,
vol.Optional("from"): str,
vol.Optional("to"): str,
vol.Optional("for"): dict,
vol.Optional("attribute"): str,
vol.Optional("value_template"): str,
},
{
"platform": "test",
"extra_field": "extra_value",
"options": {
"entity": "sensor.test",
"from": "open",
"to": "closed",
"for": {"hours": 1},
"attribute": "state",
"value_template": "{{ value_json.val }}",
},
},
),
],
)
async def test_move_schema_fields_to_options(
config, schema_dict, expected_config
) -> None:
"""Test moving schema fields to options."""
assert (
move_top_level_schema_fields_to_options(config, schema_dict) == expected_config
)
async def test_platform_multiple_triggers(hass: HomeAssistant) -> None:
"""Test a trigger platform with multiple trigger."""