Compare commits

...

83 Commits

Author SHA1 Message Date
mib1185
53b81c5bbd improve tests 2025-12-14 11:44:54 +00:00
mib1185
37a61427de fix test name 2025-12-14 11:14:42 +00:00
mib1185
f8fd4b99fe use specific trigger mock 2025-12-14 11:12:30 +00:00
Michael
8785867c16 Merge branch 'dev' into fritzbox/add-support-for-routines 2025-12-14 11:28:05 +01:00
Michael
78af3acf35 Bump pyfritzhome to 0.6.18 (#158877) 2025-12-14 08:12:25 +01:00
Joost Lekkerkerker
b72f04d44e Add integration_type hub to electrasmart (#158942) 2025-12-14 07:17:50 +01:00
Joost Lekkerkerker
35f287e330 Add integration_type hub to ekeybionyx (#158941) 2025-12-14 07:16:40 +01:00
Joost Lekkerkerker
0a55f83b46 Add integration_type hub to econet (#158940) 2025-12-14 07:14:44 +01:00
Joost Lekkerkerker
5030d0ba90 Add integration_type device to ecoforest (#158939) 2025-12-14 07:13:44 +01:00
Joost Lekkerkerker
f582f06ee4 Add integration_type service to eafm (#158937) 2025-12-14 06:59:46 +01:00
Joost Lekkerkerker
662bada5d8 Add integration_type hub to duotecno (#158936) 2025-12-14 06:59:13 +01:00
Joost Lekkerkerker
3ca338dd25 Add integration_type device to dunehd (#158935) 2025-12-14 06:58:03 +01:00
Joost Lekkerkerker
9337a0e71b Add integration_type device to droplet (#158933) 2025-12-14 06:56:08 +01:00
Joost Lekkerkerker
ccbb00197d Add integration_type hub to drop_connect (#158932) 2025-12-14 06:55:22 +01:00
Joost Lekkerkerker
0f59c17e61 Add integration_type service to dexcom (#158928) 2025-12-14 06:42:34 +01:00
Joost Lekkerkerker
6253ade3e2 Add integration_type service to datadog (#158927) 2025-12-14 06:40:48 +01:00
Joost Lekkerkerker
e5890378a1 Add integration_type device to daikin (#158926) 2025-12-14 06:40:29 +01:00
Joost Lekkerkerker
b8ab0bcadf Add integration_type service to dnsip (#158930) 2025-12-13 16:25:51 -06:00
Joost Lekkerkerker
19cb827577 Add integration_type device to doorbird (#158931) 2025-12-13 16:23:37 -06:00
Joost Lekkerkerker
03676d7e5a Add integration_type hub to ecobee (#158938) 2025-12-13 16:23:15 -06:00
Magnus
13f3b49b96 Bump aioasuswrt 1.5.3 (#158882) 2025-12-13 22:43:21 +01:00
Allen Porter
90c8c56a06 Suppress roborock failures under some unavailability threshold (#158673)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 22:30:21 +01:00
Josef Zweck
afb9e18a7d Add brew by weight controls to lamarzocco (#158169) 2025-12-13 22:28:11 +01:00
Andrew Jackson
2c2934065f Bump aiomealie to 1.1.1 and statically define mealplan entry types (#158907) 2025-12-13 22:26:31 +01:00
mettolen
0bead67df9 Add device uptime to Airobot integration (#158516) 2025-12-13 22:20:52 +01:00
James Cole
2895849203 Update strings for Firefly III integration (#158911) 2025-12-13 22:20:30 +01:00
David Recordon
b2400708ac Add myself as a maintainer for Control4 (#158948) 2025-12-13 22:15:35 +01:00
mib1185
b5addcfc81 add tests 2025-12-13 18:37:44 +00:00
Anthony Garera
0bed9c20b3 Bump python-overseerr to 0.8.0 (#158924) 2025-12-13 19:31:21 +01:00
Brett Adams
d3fb7a7b87 Bump tesla-fleet-api to 1.2.7 (#158904) 2025-12-13 15:02:19 +01:00
mib1185
90e103beb9 add support to activate/deactivate routines/triggers 2025-12-13 11:30:59 +00:00
Bouwe Westerdijk
60dcca4143 Show Plugwise configuration-link on gateway only (#158094) 2025-12-13 11:38:23 +01:00
Paul Tarjan
01f498f239 Clarify previous state in total_increasing warning message (#158805) 2025-12-13 11:15:37 +01:00
Andre Lengwenus
15055b8e8e Fix race condition in LCN climate and cover entities (#158894) 2025-12-13 11:12:20 +01:00
Bouwe Westerdijk
6826619e12 Revert adding entity_category to Plugwise thermostat schedule select (#158901) 2025-12-13 11:08:17 +01:00
Joost Lekkerkerker
b50a8e04a8 Add integration_type hub to airtouch5 (#158834)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 10:47:27 +01:00
Joost Lekkerkerker
c6c67c5357 Add integration_type hub to blue_current (#158863) 2025-12-13 10:46:12 +01:00
Joost Lekkerkerker
c82803d1e2 Add integration_type hub to agent_dvr (#158829)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 10:45:09 +01:00
Joost Lekkerkerker
732b30f181 Add integration_type hub to airzone (#158835)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 10:44:05 +01:00
Joost Lekkerkerker
0e2e57a657 Add integration_type device to android_ip_webcam (#158838)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 10:42:39 +01:00
Joost Lekkerkerker
f00b0080a9 Add integration_type device to advantage_air (#158826)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 10:39:58 +01:00
Joost Lekkerkerker
ad970c1234 Add integration_type hub to cert_expiry (#158897) 2025-12-13 10:39:14 +01:00
Joost Lekkerkerker
02ec56bffa Add integration_type device to ccm15 (#158896)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-13 10:37:36 +01:00
Joost Lekkerkerker
8388c290bf Add integration_type hub to canary (#158895) 2025-12-13 09:41:01 +01:00
Joost Lekkerkerker
576ee99faf Add integration_type hub to control4 (#158900) 2025-12-13 09:36:36 +01:00
Joost Lekkerkerker
8a3534c345 Add integration_type service to coinbase (#158899) 2025-12-13 09:31:54 +01:00
Joost Lekkerkerker
e1e91c5568 Add integration_type service to cloudflare (#158898) 2025-12-13 09:31:25 +01:00
epenet
1e09bddb1d Cleanup deprecated alias in core (#158799) 2025-12-13 09:29:15 +01:00
Joost Lekkerkerker
90e4340595 Add integration_type hub to brunt (#158870)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-12-13 09:04:05 +01:00
Joost Lekkerkerker
120b17349c Add integration_type service to aussie_broadband (#158853) 2025-12-13 08:56:35 +01:00
Joost Lekkerkerker
8a26961304 Add integration_type service to aurora (#158852) 2025-12-13 08:56:01 +01:00
Joost Lekkerkerker
407b675080 Add integration_type device to atag (#158850) 2025-12-13 08:55:35 +01:00
Joost Lekkerkerker
274844271b Add integration_type hub to aseko_pool_live (#158849) 2025-12-13 08:54:46 +01:00
Joost Lekkerkerker
f11e4e7bda Add integration_type hub to aosmith (#158843) 2025-12-13 08:52:45 +01:00
Joost Lekkerkerker
96f8c39c6f Add integration_type device to anthemav (#158841) 2025-12-13 08:51:25 +01:00
Joost Lekkerkerker
77b79fef8d Add integration_type hub to anova (#158840) 2025-12-13 08:50:24 +01:00
mkmer
a0d2f285f3 blink: Remove mkmer as codeowner (#158884) 2025-12-13 08:45:13 +01:00
Joost Lekkerkerker
3aef05d1ec Add integration_type hub to airzone_cloud (#158836) 2025-12-13 08:43:57 +01:00
Joost Lekkerkerker
510e391ee4 Add integration_type device to airtouch4 (#158833) 2025-12-13 08:41:17 +01:00
Joost Lekkerkerker
54adfdd694 Add integration_type device to bluesound (#158865) 2025-12-13 08:38:48 +01:00
Joost Lekkerkerker
d45f920b4a Add integration_type service to amberelectric (#158837) 2025-12-13 08:37:16 +01:00
Joost Lekkerkerker
3080ef9a4a Add integration_type device to airthings_ble (#158832) 2025-12-13 08:36:06 +01:00
Joost Lekkerkerker
51cebb52f3 Add integration_type hub to airthings (#158831) 2025-12-13 08:34:28 +01:00
Joost Lekkerkerker
7b0d4c47b7 Add integration_type service to airnow (#158830) 2025-12-13 08:33:53 +01:00
Joost Lekkerkerker
a660ab3f97 Add integration_type service to aftership (#158828) 2025-12-13 08:32:31 +01:00
Joost Lekkerkerker
dd8fc16788 Add integration_type service to aemet (#158827) 2025-12-13 08:32:01 +01:00
Joost Lekkerkerker
2b0fab0468 Add integration_type service to brottsplatskartan (#158869) 2025-12-13 08:30:59 +01:00
Joost Lekkerkerker
3bb88ed433 Add integration_type hub to bosch_shc (#158868) 2025-12-13 08:30:04 +01:00
Joost Lekkerkerker
984385cd98 Add integration_type service to buienradar (#158871) 2025-12-13 08:27:55 +01:00
Joost Lekkerkerker
09de108676 Add integration_type service to caldav (#158872) 2025-12-13 08:26:40 +01:00
Joost Lekkerkerker
ebc7581718 Add integration_type hub to bmw_connected_drive (#158866) 2025-12-13 08:26:16 +01:00
Joost Lekkerkerker
e55162812d Add integration_type hub to blink (#158862) 2025-12-13 08:22:22 +01:00
Joost Lekkerkerker
aa6ccaa024 Add integration_type device to blebox (#158860) 2025-12-13 08:21:25 +01:00
Joost Lekkerkerker
e1b009a6de Add integration_type device to balboa (#158859) 2025-12-13 08:20:12 +01:00
Joost Lekkerkerker
91ddc525b0 Add integration_type service to azure_event_hub (#158857) 2025-12-13 08:18:56 +01:00
Joost Lekkerkerker
d7d7954ac2 Add integration_type service to azure_devops (#158856) 2025-12-13 08:18:35 +01:00
Joost Lekkerkerker
e87c260df7 Add integration_type service to azure_data_explorer (#158855) 2025-12-13 08:18:13 +01:00
Joost Lekkerkerker
5185c6cd68 Add integration_type hub to arve (#158848) 2025-12-13 08:17:37 +01:00
Joost Lekkerkerker
7599c918e2 Add integration_type hub to august (#158851) 2025-12-12 23:00:06 +01:00
Joost Lekkerkerker
fa7e22ec91 Add integration_type device to arcam_fmj (#158846) 2025-12-12 22:59:47 +01:00
Joost Lekkerkerker
606519e51b Add integration_type device to baf (#158858) 2025-12-12 22:59:28 +01:00
Joost Lekkerkerker
8e39e010f7 Add integration_type device to bluemaestro (#158864) 2025-12-12 22:59:13 +01:00
Joost Lekkerkerker
dc01cf49a0 Add integration_type hub to bond (#158867) 2025-12-12 22:58:57 +01:00
107 changed files with 1105 additions and 351 deletions

8
CODEOWNERS generated
View File

@@ -220,8 +220,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/bizkaibus/ @UgaitzEtxebarria
/homeassistant/components/blebox/ @bbx-a @swistakm
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blink/ @fronzbot
/tests/components/blink/ @fronzbot
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/homeassistant/components/bluemaestro/ @bdraco
@@ -308,8 +308,8 @@ build.json @home-assistant/supervisor
/tests/components/config/ @home-assistant/core
/homeassistant/components/configurator/ @home-assistant/core
/tests/components/configurator/ @home-assistant/core
/homeassistant/components/control4/ @lawtancool
/tests/components/control4/ @lawtancool
/homeassistant/components/control4/ @lawtancool @davidrecordon
/tests/components/control4/ @lawtancool @davidrecordon
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/cookidoo/ @miaucl

View File

@@ -4,6 +4,7 @@
"codeowners": ["@Bre77"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["advantage_air"],
"requirements": ["advantage-air==0.4.4"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@Noltari"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aemet",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.6.4"]

View File

@@ -4,6 +4,7 @@
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aftership",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["pyaftership==21.11.0"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@ispysoftware"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["agent"],
"requirements": ["agent-py==0.0.24"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@asymworks"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airnow",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pyairnow"],
"requirements": ["pyairnow==1.3.1"]

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta
from pyairobotrest.models import ThermostatStatus
@@ -23,6 +24,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util.dt import utcnow
from homeassistant.util.variance import ignore_variance
from . import AirobotConfigEntry
from .entity import AirobotEntity
@@ -34,10 +37,15 @@ PARALLEL_UPDATES = 0
class AirobotSensorEntityDescription(SensorEntityDescription):
"""Describes Airobot sensor entity."""
value_fn: Callable[[ThermostatStatus], StateType]
value_fn: Callable[[ThermostatStatus], StateType | datetime]
supported_fn: Callable[[ThermostatStatus], bool] = lambda _: True
uptime_to_stable_datetime = ignore_variance(
lambda value: utcnow().replace(microsecond=0) - timedelta(seconds=value),
timedelta(minutes=2),
)
SENSOR_TYPES: tuple[AirobotSensorEntityDescription, ...] = (
AirobotSensorEntityDescription(
key="air_temperature",
@@ -96,6 +104,14 @@ SENSOR_TYPES: tuple[AirobotSensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: status.errors,
),
AirobotSensorEntityDescription(
key="device_uptime",
translation_key="device_uptime",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: uptime_to_stable_datetime(status.device_uptime),
entity_registry_enabled_default=False,
),
)
@@ -129,6 +145,6 @@ class AirobotSensor(AirobotEntity, SensorEntity):
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data.status)

View File

@@ -17,6 +17,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/airthings",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["airthings"],
"requirements": ["airthings-cloud==0.2.0"]

View File

@@ -27,6 +27,7 @@
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airthings-ble==1.2.0"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@samsinnamon"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airtouch4",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["airtouch4pyapi"],
"requirements": ["airtouch4pyapi==1.0.5"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@danzel"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["airtouch5py"],
"requirements": ["airtouch5py==0.3.0"]

View File

@@ -9,6 +9,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/airzone",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==1.0.4"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@Noltari"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.7.2"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@madpilot"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/amberelectric",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["amberelectric"],
"requirements": ["amberelectric==2.0.12"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@engrbm87"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["pydroid-ipcam==3.0.0"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@Lash-L"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/anova",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["anova_wifi"],
"requirements": ["anova-wifi==0.17.0"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@hyralex"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/anthemav",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["anthemav"],
"requirements": ["anthemav==1.4.1"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@bdr99"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aosmith",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["py-aosmith==1.0.15"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@elupus"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["arcam"],
"requirements": ["arcam-fmj==1.8.2"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@ikalnyi"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/arve",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["asyncarve==0.1.1"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@milanmeu"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aseko_pool_live",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioaseko"],
"requirements": ["aioaseko==1.0.0"]

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.5.2", "asusrouter==1.21.3"]
"requirements": ["aioasuswrt==1.5.3", "asusrouter==1.21.3"]
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@MatsNL"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/atag",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pyatag"],
"requirements": ["pyatag==0.3.5.3"]

View File

@@ -27,6 +27,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/august",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@djtimca"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aurora",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["auroranoaa"],
"requirements": ["auroranoaa==0.0.5"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@nickw444", "@Bre77"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aussiebb"],
"requirements": ["pyaussiebb==0.1.5"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@kaareseras"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/azure_data_explorer",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["azure"],
"requirements": ["azure-kusto-ingest==4.5.1", "azure-kusto-data[aio]==4.5.1"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@timmo001"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/azure_devops",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aioazuredevops"],
"requirements": ["aioazuredevops==2.2.2"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@eavanvalkenburg"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/azure_event_hub",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["azure"],
"requirements": ["azure-eventhub==5.11.1"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@bdraco", "@jfroy"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/baf",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["aiobafi6==0.9.0"],
"zeroconf": [

View File

@@ -12,6 +12,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/balboa",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pybalboa"],
"requirements": ["pybalboa==1.1.3"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@bbx-a", "@swistakm"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/blebox",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["blebox_uniapi"],
"requirements": ["blebox-uniapi==2.5.0"],

View File

@@ -1,7 +1,7 @@
{
"domain": "blink",
"name": "Blink",
"codeowners": ["@fronzbot", "@mkmer"],
"codeowners": ["@fronzbot"],
"config_flow": true,
"dhcp": [
{
@@ -18,6 +18,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/blink",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["blinkpy"],
"requirements": ["blinkpy==0.25.1"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@gleeuwen", "@NickKoepr", "@jtodorova23"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.3.2"]

View File

@@ -11,6 +11,7 @@
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bluemaestro",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["bluemaestro-ble==0.4.1"]
}

View File

@@ -5,6 +5,7 @@
"codeowners": ["@thrawnarn", "@LouisChrist"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bluesound",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["pyblu==2.0.5"],
"zeroconf": [

View File

@@ -4,6 +4,7 @@
"codeowners": ["@gerard33", "@rikroe"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["bimmer_connected"],
"requirements": ["bimmer-connected[china]==0.17.3"]

View File

@@ -14,6 +14,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/bond",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["bond_async"],
"requirements": ["bond-async==0.2.1"],

View File

@@ -5,6 +5,7 @@
"codeowners": ["@tschamm"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bosch_shc",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["boschshcpy"],
"requirements": ["boschshcpy==0.2.107"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@gjohansson-ST"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/brottsplatskartan",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["brottsplatskartan"],
"requirements": ["brottsplatskartan==1.0.5"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@eavanvalkenburg"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/brunt",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["brunt"],
"requirements": ["brunt==1.2.0"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@mjj4791", "@ties", "@Robbie1221"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/buienradar",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["buienradar", "vincenty"],
"requirements": ["buienradar==1.0.6"]

View File

@@ -4,6 +4,7 @@
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/caldav",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["caldav", "vobject"],
"requirements": ["caldav==2.1.0", "icalendar==6.3.1", "vobject==0.9.9"]

View File

@@ -5,6 +5,7 @@
"config_flow": true,
"dependencies": ["ffmpeg"],
"documentation": "https://www.home-assistant.io/integrations/canary",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["canary"],
"requirements": ["py-canary==0.5.4"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@ocalvo"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ccm15",
"integration_type": "hub",
"iot_class": "local_polling",
"requirements": ["py_ccm15==0.1.2"]
}

View File

@@ -4,5 +4,6 @@
"codeowners": ["@jjlawren"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cert_expiry",
"integration_type": "service",
"iot_class": "cloud_polling"
}

View File

@@ -4,6 +4,7 @@
"codeowners": ["@ludeeus", "@ctalkington"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cloudflare",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["pycfdns"],
"requirements": ["pycfdns==3.0.0"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@tombrien"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/coinbase",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["coinbase"],
"requirements": ["coinbase-advanced-py==1.2.2"]

View File

@@ -1,9 +1,10 @@
{
"domain": "control4",
"name": "Control4",
"codeowners": ["@lawtancool"],
"codeowners": ["@lawtancool", "@davidrecordon"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/control4",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyControl4"],
"requirements": ["pyControl4==1.5.0"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@fredrike"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/daikin",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.17.1"],

View File

@@ -4,6 +4,7 @@
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/datadog",
"integration_type": "service",
"iot_class": "local_push",
"loggers": ["datadog"],
"quality_scale": "legacy",

View File

@@ -4,6 +4,7 @@
"codeowners": ["@gagebenne"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dexcom",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pydexcom"],
"requirements": ["pydexcom==0.2.3"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@gjohansson-ST"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dnsip",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["aiodns==3.6.0"]
}

View File

@@ -5,6 +5,7 @@
"config_flow": true,
"dependencies": ["http", "repairs"],
"documentation": "https://www.home-assistant.io/integrations/doorbird",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["doorbirdpy"],
"requirements": ["DoorBirdPy==3.0.11"],

View File

@@ -5,6 +5,7 @@
"config_flow": true,
"dependencies": ["mqtt"],
"documentation": "https://www.home-assistant.io/integrations/drop_connect",
"integration_type": "hub",
"iot_class": "local_push",
"mqtt": ["drop_connect/discovery/#"],
"requirements": ["dropmqttapi==1.0.3"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@sarahseidman"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/droplet",
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["pydroplet==2.3.4"],

View File

@@ -4,6 +4,7 @@
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dunehd",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pdunehd"],
"requirements": ["pdunehd==1.3.2"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@cereal2nd"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/duotecno",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pyduotecno", "pyduotecno-node", "pyduotecno-unit"],
"requirements": ["pyDuotecno==2024.10.1"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@Jc2k"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/eafm",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aioeafm"],
"requirements": ["aioeafm==0.1.2"]

View File

@@ -7,6 +7,7 @@
"homekit": {
"models": ["EB", "ecobee*"]
},
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["pyecobee"],
"requirements": ["python-ecobee-api==0.3.2"],

View File

@@ -4,6 +4,7 @@
"codeowners": ["@pjanuario"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ecoforest",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pyecoforest"],
"requirements": ["pyecoforest==0.4.0"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@w1ll1am23"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/econet",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["paho_mqtt", "pyeconet"],
"requirements": ["pyeconet==0.1.28"]

View File

@@ -5,6 +5,7 @@
"config_flow": true,
"dependencies": ["application_credentials", "http"],
"documentation": "https://www.home-assistant.io/integrations/ekeybionyx",
"integration_type": "hub",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["ekey-bionyxpy==1.0.0"]

View File

@@ -4,6 +4,7 @@
"codeowners": ["@jafar-atili"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/electrasmart",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["pyElectra==1.2.4"]
}

View File

@@ -42,11 +42,11 @@
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"api_key": "The API key for authenticating with Firefly",
"api_key": "The API key for authenticating with Firefly III",
"url": "[%key:common::config_flow::data::url%]",
"verify_ssl": "Verify the SSL certificate of the Firefly instance"
"verify_ssl": "Verify the SSL certificate of the Firefly III instance"
},
"description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
"description": "You can create an API key in the Firefly III UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
}
}
},
@@ -84,13 +84,13 @@
},
"exceptions": {
"cannot_connect": {
"message": "An error occurred while trying to connect to the Firefly instance: {error}"
"message": "An error occurred while trying to connect to the Firefly III instance: {error}"
},
"invalid_auth": {
"message": "An error occurred while trying to authenticate: {error}"
},
"timeout_connect": {
"message": "A timeout occurred while trying to connect to the Firefly instance: {error}"
"message": "A timeout occurred while trying to connect to the Firefly III instance: {error}"
}
}
}

View File

@@ -6,7 +6,7 @@ from dataclasses import dataclass
from datetime import timedelta
from pyfritzhome import Fritzhome, FritzhomeDevice, LoginError
from pyfritzhome.devicetypes import FritzhomeTemplate
from pyfritzhome.devicetypes import FritzhomeTemplate, FritzhomeTrigger
from requests.exceptions import ConnectionError as RequestConnectionError, HTTPError
from homeassistant.config_entries import ConfigEntry
@@ -27,6 +27,7 @@ class FritzboxCoordinatorData:
devices: dict[str, FritzhomeDevice]
templates: dict[str, FritzhomeTemplate]
triggers: dict[str, FritzhomeTrigger]
supported_color_properties: dict[str, tuple[dict, list]]
@@ -37,6 +38,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
configuration_url: str
fritz: Fritzhome
has_templates: bool
has_triggers: bool
def __init__(self, hass: HomeAssistant, config_entry: FritzboxConfigEntry) -> None:
"""Initialize the Fritzbox Smarthome device coordinator."""
@@ -50,8 +52,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
self.new_devices: set[str] = set()
self.new_templates: set[str] = set()
self.new_triggers: set[str] = set()
self.data = FritzboxCoordinatorData({}, {}, {})
self.data = FritzboxCoordinatorData({}, {}, {}, {})
async def async_setup(self) -> None:
"""Set up the coordinator."""
@@ -74,6 +77,11 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
)
LOGGER.debug("enable smarthome templates: %s", self.has_templates)
self.has_triggers = await self.hass.async_add_executor_job(
self.fritz.has_triggers
)
LOGGER.debug("enable smarthome triggers: %s", self.has_triggers)
self.configuration_url = self.fritz.get_prefixed_host()
await self.async_config_entry_first_refresh()
@@ -92,7 +100,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
available_main_ains = [
ain
for ain, dev in data.devices.items() | data.templates.items()
for ain, dev in data.devices.items()
| data.templates.items()
| data.triggers.items()
if dev.device_and_unit_id[1] is None
]
device_reg = dr.async_get(self.hass)
@@ -112,6 +122,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
except RequestConnectionError as ex:
raise UpdateFailed from ex
except HTTPError:
@@ -123,6 +136,8 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
devices = self.fritz.get_devices()
device_data = {}
@@ -156,12 +171,20 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
for template in templates:
template_data[template.ain] = template
trigger_data = {}
if self.has_triggers:
triggers = self.fritz.get_triggers()
for trigger in triggers:
trigger_data[trigger.ain] = trigger
self.new_devices = device_data.keys() - self.data.devices.keys()
self.new_templates = template_data.keys() - self.data.templates.keys()
self.new_triggers = trigger_data.keys() - self.data.triggers.keys()
return FritzboxCoordinatorData(
devices=device_data,
templates=template_data,
triggers=trigger_data,
supported_color_properties=supported_color_properties,
)
@@ -193,6 +216,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
if (
self.data.devices.keys() - new_data.devices.keys()
or self.data.templates.keys() - new_data.templates.keys()
or self.data.triggers.keys() - new_data.triggers.keys()
):
self.cleanup_removed_devices(new_data)

View File

@@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyfritzhome"],
"requirements": ["pyfritzhome==0.6.17"],
"requirements": ["pyfritzhome==0.6.18"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:fritzbox:1"

View File

@@ -4,14 +4,17 @@ from __future__ import annotations
from typing import Any
from pyfritzhome.devicetypes import FritzhomeTrigger
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import FritzboxConfigEntry
from .entity import FritzBoxDeviceEntity
from .entity import FritzBoxDeviceEntity, FritzBoxEntity
# Coordinator handles data updates, so we can allow unlimited parallel updates
PARALLEL_UPDATES = 0
@@ -26,21 +29,27 @@ async def async_setup_entry(
coordinator = entry.runtime_data
@callback
def _add_entities(devices: set[str] | None = None) -> None:
"""Add devices."""
def _add_entities(
devices: set[str] | None = None, triggers: set[str] | None = None
) -> None:
"""Add devices and triggers."""
if devices is None:
devices = coordinator.new_devices
if not devices:
if triggers is None:
triggers = coordinator.new_triggers
if not devices and not triggers:
return
async_add_entities(
entities = [
FritzboxSwitch(coordinator, ain)
for ain in devices
if coordinator.data.devices[ain].has_switch
)
] + [FritzboxTrigger(coordinator, ain) for ain in triggers]
async_add_entities(entities)
entry.async_on_unload(coordinator.async_add_listener(_add_entities))
_add_entities(set(coordinator.data.devices))
_add_entities(set(coordinator.data.devices), set(coordinator.data.triggers))
class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
@@ -70,3 +79,42 @@ class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
translation_domain=DOMAIN,
translation_key="manual_switching_disabled",
)
class FritzboxTrigger(FritzBoxEntity, SwitchEntity):
"""The switch class for FRITZ!SmartHome triggers."""
@property
def data(self) -> FritzhomeTrigger:
"""Return the trigger data entity."""
return self.coordinator.data.triggers[self.ain]
@property
def device_info(self) -> DeviceInfo:
"""Return device specific attributes."""
return DeviceInfo(
name=self.data.name,
identifiers={(DOMAIN, self.ain)},
configuration_url=self.coordinator.configuration_url,
manufacturer="FRITZ!",
model="SmartHome Routine",
)
@property
def is_on(self) -> bool:
"""Return true if the trigger is active."""
return self.data.active # type: ignore [no-any-return]
async def async_turn_on(self, **kwargs: Any) -> None:
"""Activate the trigger."""
await self.hass.async_add_executor_job(
self.coordinator.fritz.set_trigger_active, self.ain
)
await self.coordinator.async_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Deactivate the trigger."""
await self.hass.async_add_executor_job(
self.coordinator.fritz.set_trigger_inactive, self.ain
)
await self.coordinator.async_refresh()

View File

@@ -28,6 +28,9 @@
}
},
"number": {
"bbw_dose": {
"default": "mdi:weight-gram"
},
"coffee_temp": {
"default": "mdi:thermometer-water"
},
@@ -51,6 +54,14 @@
}
},
"select": {
"bbw_dose_mode": {
"default": "mdi:all-inclusive-box",
"state": {
"continuous": "mdi:all-inclusive-box",
"dose1": "mdi:numeric-1-box",
"dose2": "mdi:numeric-2-box"
}
},
"prebrew_infusion_select": {
"default": "mdi:water-pump-off",
"state": {

View File

@@ -5,9 +5,14 @@ from dataclasses import dataclass
from typing import Any, cast
from pylamarzocco import LaMarzoccoMachine
from pylamarzocco.const import ModelName, PreExtractionMode, WidgetType
from pylamarzocco.const import DoseMode, ModelName, PreExtractionMode, WidgetType
from pylamarzocco.exceptions import RequestNotSuccessful
from pylamarzocco.models import CoffeeBoiler, PreBrewing, SteamBoilerTemperature
from pylamarzocco.models import (
BrewByWeightDoses,
CoffeeBoiler,
PreBrewing,
SteamBoilerTemperature,
)
from homeassistant.components.number import (
NumberDeviceClass,
@@ -18,6 +23,7 @@ from homeassistant.const import (
PRECISION_TENTHS,
PRECISION_WHOLE,
EntityCategory,
UnitOfMass,
UnitOfTemperature,
UnitOfTime,
)
@@ -219,6 +225,72 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
)
),
),
LaMarzoccoNumberEntityDescription(
key="bbw_dose_1",
translation_key="bbw_dose",
translation_placeholders={"dose": "Dose 1"},
device_class=NumberDeviceClass.WEIGHT,
native_unit_of_measurement=UnitOfMass.GRAMS,
native_step=PRECISION_TENTHS,
native_min_value=5,
native_max_value=100,
entity_category=EntityCategory.CONFIG,
set_value_fn=(
lambda machine, value: machine.set_brew_by_weight_dose(
dose=DoseMode.DOSE_1,
value=value,
)
),
native_value_fn=(
lambda machine: cast(
BrewByWeightDoses,
machine.dashboard.config[WidgetType.CM_BREW_BY_WEIGHT_DOSES],
).doses.dose_1.dose
),
available_fn=lambda coordinator: (
cast(
BrewByWeightDoses,
coordinator.device.dashboard.config[WidgetType.CM_BREW_BY_WEIGHT_DOSES],
).scale_connected
),
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
),
),
LaMarzoccoNumberEntityDescription(
key="bbw_dose_2",
translation_key="bbw_dose",
translation_placeholders={"dose": "Dose 2"},
device_class=NumberDeviceClass.WEIGHT,
native_unit_of_measurement=UnitOfMass.GRAMS,
native_step=PRECISION_TENTHS,
native_min_value=5,
native_max_value=100,
entity_category=EntityCategory.CONFIG,
set_value_fn=(
lambda machine, value: machine.set_brew_by_weight_dose(
dose=DoseMode.DOSE_2,
value=value,
)
),
native_value_fn=(
lambda machine: cast(
BrewByWeightDoses,
machine.dashboard.config[WidgetType.CM_BREW_BY_WEIGHT_DOSES],
).doses.dose_2.dose
),
available_fn=lambda coordinator: (
cast(
BrewByWeightDoses,
coordinator.device.dashboard.config[WidgetType.CM_BREW_BY_WEIGHT_DOSES],
).scale_connected
),
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
),
),
)

View File

@@ -5,6 +5,7 @@ from dataclasses import dataclass
from typing import Any, cast
from pylamarzocco.const import (
DoseMode,
ModelName,
PreExtractionMode,
SmartStandByType,
@@ -13,7 +14,7 @@ from pylamarzocco.const import (
)
from pylamarzocco.devices import LaMarzoccoMachine
from pylamarzocco.exceptions import RequestNotSuccessful
from pylamarzocco.models import PreBrewing, SteamBoilerLevel
from pylamarzocco.models import BrewByWeightDoses, PreBrewing, SteamBoilerLevel
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory
@@ -50,6 +51,14 @@ STANDBY_MODE_HA_TO_LM = {
STANDBY_MODE_LM_TO_HA = {value: key for key, value in STANDBY_MODE_HA_TO_LM.items()}
DOSE_MODE_HA_TO_LM = {
"continuous": DoseMode.CONTINUOUS,
"dose1": DoseMode.DOSE_1,
"dose2": DoseMode.DOSE_2,
}
DOSE_MODE_LM_TO_HA = {value: key for key, value in DOSE_MODE_HA_TO_LM.items()}
@dataclass(frozen=True, kw_only=True)
class LaMarzoccoSelectEntityDescription(
@@ -117,6 +126,31 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
machine.schedule.smart_wake_up_sleep.smart_stand_by_after
],
),
LaMarzoccoSelectEntityDescription(
key="bbw_dose_mode",
translation_key="bbw_dose_mode",
entity_category=EntityCategory.CONFIG,
options=["continuous", "dose1", "dose2"],
select_option_fn=lambda machine, option: machine.set_brew_by_weight_dose_mode(
mode=DOSE_MODE_HA_TO_LM[option]
),
current_option_fn=lambda machine: DOSE_MODE_LM_TO_HA[
cast(
BrewByWeightDoses,
machine.dashboard.config[WidgetType.CM_BREW_BY_WEIGHT_DOSES],
).mode
],
available_fn=lambda coordinator: (
cast(
BrewByWeightDoses,
coordinator.device.dashboard.config[WidgetType.CM_BREW_BY_WEIGHT_DOSES],
).scale_connected
),
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
),
),
)

View File

@@ -87,6 +87,9 @@
}
},
"number": {
"bbw_dose": {
"name": "Brew by weight {dose}"
},
"coffee_temp": {
"name": "Coffee target temperature"
},
@@ -107,6 +110,14 @@
}
},
"select": {
"bbw_dose_mode": {
"name": "Brew by weight dose mode",
"state": {
"continuous": "Continuous",
"dose1": "Dose 1",
"dose2": "Dose 2"
}
},
"prebrew_infusion_select": {
"name": "Prebrew/-infusion mode",
"state": {

View File

@@ -1,6 +1,5 @@
"""Support for LCN climate control."""
import asyncio
from collections.abc import Iterable
from datetime import timedelta
from functools import partial
@@ -172,14 +171,14 @@ class LcnClimate(LcnEntity, ClimateEntity):
async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = any(
await asyncio.gather(
self.device_connection.request_status_variable(
[
await self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
),
self.device_connection.request_status_variable(
await self.device_connection.request_status_variable(
self.setpoint, SCAN_INTERVAL.seconds
),
)
]
)
def input_received(self, input_obj: InputType) -> None:

View File

@@ -1,6 +1,5 @@
"""Support for LCN covers."""
import asyncio
from collections.abc import Coroutine, Iterable
from datetime import timedelta
from functools import partial
@@ -134,14 +133,14 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
"""Update the state of the entity."""
if not self.device_connection.is_group:
self._attr_available = any(
await asyncio.gather(
self.device_connection.request_status_output(
[
await self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTUP"], SCAN_INTERVAL.seconds
),
self.device_connection.request_status_output(
await self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTDOWN"], SCAN_INTERVAL.seconds
),
)
]
)
def input_received(self, input_obj: InputType) -> None:
@@ -274,7 +273,7 @@ class LcnRelayCover(LcnEntity, CoverEntity):
self.motor, self.positioning_mode, SCAN_INTERVAL.seconds
)
)
self._attr_available = any(await asyncio.gather(*coros))
self._attr_available = any([await coro for coro in coros])
def input_received(self, input_obj: InputType) -> None:
"""Set cover states when LCN input object (command) is received."""

View File

@@ -15,6 +15,13 @@ from .entity import MealieEntity
PARALLEL_UPDATES = 0
SUPPORTED_MEALPLAN_ENTRY_TYPES = [
MealplanEntryType.BREAKFAST,
MealplanEntryType.DINNER,
MealplanEntryType.LUNCH,
MealplanEntryType.SIDE,
]
async def async_setup_entry(
hass: HomeAssistant,
@@ -26,7 +33,7 @@ async def async_setup_entry(
async_add_entities(
MealieMealplanCalendarEntity(coordinator, entry_type)
for entry_type in MealplanEntryType
for entry_type in SUPPORTED_MEALPLAN_ENTRY_TYPES
)

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.1.0"]
"requirements": ["aiomealie==1.1.1"]
}

View File

@@ -9,5 +9,5 @@
"integration_type": "service",
"iot_class": "local_push",
"quality_scale": "platinum",
"requirements": ["python-overseerr==0.7.1"]
"requirements": ["python-overseerr==0.8.0"]
}

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from plugwise.constants import GwEntityData
from plugwise import GwEntityData
from homeassistant.const import ATTR_NAME, ATTR_VIA_DEVICE, CONF_HOST
from homeassistant.helpers.device_registry import (
@@ -30,37 +30,43 @@ class PlugwiseEntity(CoordinatorEntity[PlugwiseDataUpdateCoordinator]):
super().__init__(coordinator)
self._dev_id = device_id
configuration_url: str | None = None
if entry := self.coordinator.config_entry:
configuration_url = f"http://{entry.data[CONF_HOST]}"
api = coordinator.api
gateway_id = api.gateway_id
entry = coordinator.config_entry
data = coordinator.data[device_id]
# Link configuration-URL for the gateway device
configuration_url = (
f"http://{entry.data[CONF_HOST]}"
if device_id == gateway_id and entry
else None
)
# Build connections set
connections = set()
if mac := data.get("mac_address"):
if mac := self.device.get("mac_address"):
connections.add((CONNECTION_NETWORK_MAC, mac))
if mac := data.get("zigbee_mac_address"):
connections.add((CONNECTION_ZIGBEE, mac))
if zigbee_mac := self.device.get("zigbee_mac_address"):
connections.add((CONNECTION_ZIGBEE, zigbee_mac))
# Set base device info
self._attr_device_info = DeviceInfo(
configuration_url=configuration_url,
identifiers={(DOMAIN, device_id)},
connections=connections,
manufacturer=data.get("vendor"),
model=data.get("model"),
model_id=data.get("model_id"),
name=coordinator.api.smile.name,
sw_version=data.get("firmware"),
hw_version=data.get("hardware"),
manufacturer=self.device.get("vendor"),
model=self.device.get("model"),
model_id=self.device.get("model_id"),
name=api.smile.name,
sw_version=self.device.get("firmware"),
hw_version=self.device.get("hardware"),
)
if device_id != coordinator.api.gateway_id:
# Add extra info if not the gateway device
if device_id != gateway_id:
self._attr_device_info.update(
{
ATTR_NAME: data.get(ATTR_NAME),
ATTR_VIA_DEVICE: (
DOMAIN,
str(self.coordinator.api.gateway_id),
),
ATTR_NAME: self.device.get(ATTR_NAME),
ATTR_VIA_DEVICE: (DOMAIN, gateway_id),
}
)

View File

@@ -37,7 +37,6 @@ SELECT_TYPES = (
PlugwiseSelectEntityDescription(
key=SELECT_SCHEDULE,
translation_key=SELECT_SCHEDULE,
entity_category=EntityCategory.CONFIG,
options_key="available_schedules",
),
PlugwiseSelectEntityDescription(

View File

@@ -43,6 +43,12 @@ from .models import DeviceState
SCAN_INTERVAL = timedelta(seconds=30)
# Roborock devices have a known issue where they go offline for a short period
# around 3AM local time for ~1 minute and reset both the local connection
# and MQTT connection. To avoid log spam, we will avoid reporting failures refreshing
# data until this duration has passed.
MIN_UNAVAILABLE_DURATION = timedelta(minutes=2)
_LOGGER = logging.getLogger(__name__)
@@ -102,6 +108,9 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
# Keep track of last attempt to refresh maps/rooms to know when to try again.
self._last_home_update_attempt: datetime
self.last_home_update: datetime | None = None
# Tracks the last successful update to control when we report failure
# to the base class. This is reset on successful data update.
self._last_update_success_time: datetime | None = None
@cached_property
def dock_device_info(self) -> DeviceInfo:
@@ -169,7 +178,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
self.last_home_update = dt_util.utcnow()
async def _verify_api(self) -> None:
"""Verify that the api is reachable. If it is not, switch clients."""
"""Verify that the api is reachable."""
if self._device.is_connected:
if self._device.is_local_connected:
async_delete_issue(
@@ -217,26 +226,27 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
try:
# Update device props and standard api information
await self._update_device_prop()
except UpdateFailed:
if self._should_suppress_update_failure():
_LOGGER.debug(
"Suppressing update failure until unavailable duration passed"
)
return self.data
raise
# If the vacuum is currently cleaning and it has been IMAGE_CACHE_INTERVAL
# since the last map update, you can update the map.
new_status = self.properties_api.status
if (
new_status.in_cleaning
and (dt_util.utcnow() - self._last_home_update_attempt)
> IMAGE_CACHE_INTERVAL
) or self.last_update_state != new_status.state_name:
self._last_home_update_attempt = dt_util.utcnow()
try:
await self.update_map()
except HomeAssistantError as err:
_LOGGER.debug("Failed to update map: %s", err)
except RoborockException as ex:
_LOGGER.debug("Failed to update data: %s", ex)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_data_fail",
) from ex
# If the vacuum is currently cleaning and it has been IMAGE_CACHE_INTERVAL
# since the last map update, you can update the map.
new_status = self.properties_api.status
if (
new_status.in_cleaning
and (dt_util.utcnow() - self._last_home_update_attempt)
> IMAGE_CACHE_INTERVAL
) or self.last_update_state != new_status.state_name:
self._last_home_update_attempt = dt_util.utcnow()
try:
await self.update_map()
except HomeAssistantError as err:
_LOGGER.debug("Failed to update map: %s", err)
if self.properties_api.status.in_cleaning:
if self._device.is_local_connected:
@@ -248,6 +258,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
else:
self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL
self.last_update_state = self.properties_api.status.state_name
self._last_update_success_time = dt_util.utcnow()
_LOGGER.debug("Data update successful %s", self._last_update_success_time)
return DeviceState(
status=self.properties_api.status,
dnd_timer=self.properties_api.dnd,
@@ -255,6 +267,23 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
clean_summary=self.properties_api.clean_summary,
)
def _should_suppress_update_failure(self) -> bool:
"""Determine if we should suppress update failure reporting.
We suppress reporting update failures until a minimum duration has
passed since the last successful update. This is used to avoid reporting
the device as unavailable for short periods, a known issue.
The intent is to apply to routine background state refreshes and not
other failures such as the first update or map updates.
"""
if self._last_update_success_time is None:
# Never had a successful update, do not suppress
return False
failure_duration = dt_util.utcnow() - self._last_update_success_time
_LOGGER.debug("Update failure duration: %s", failure_duration)
return failure_duration < MIN_UNAVAILABLE_DURATION
async def get_routines(self) -> list[HomeDataScene]:
"""Get routines."""
try:

View File

@@ -416,8 +416,8 @@ def warn_dip(
_LOGGER.warning(
(
"Entity %s %shas state class total_increasing, but its state is not"
" strictly increasing. Triggered by state %s (%s) with last_updated set"
" to %s. Please %s"
" strictly increasing. Triggered by state %s (previous state: %s) with"
" last_updated set to %s. Please %s"
),
entity_id,
f"from integration {domain} " if domain else "",

View File

@@ -8,5 +8,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.2.5"]
"requirements": ["tesla-fleet-api==1.2.7"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.2.5", "teslemetry-stream==0.7.10"]
"requirements": ["tesla-fleet-api==1.2.7", "teslemetry-stream==0.7.10"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tessie", "tesla-fleet-api"],
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.5"]
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.7"]
}

View File

@@ -84,12 +84,6 @@ from .exceptions import (
ServiceValidationError,
Unauthorized,
)
from .helpers.deprecation import (
DeferredDeprecatedAlias,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from .helpers.json import json_bytes, json_fragment
from .helpers.typing import VolSchemaType
from .util import dt as dt_util
@@ -161,18 +155,6 @@ class EventStateReportedData(EventStateEventData):
old_last_reported: datetime.datetime
def _deprecated_core_config() -> Any:
from . import core_config # noqa: PLC0415
return core_config.Config
# The Config class was moved to core_config in Home Assistant 2024.11
_DEPRECATED_Config = DeferredDeprecatedAlias(
_deprecated_core_config, "homeassistant.core_config.Config", "2025.11"
)
# How long to wait until things that run on startup have to finish.
TIMEOUT_EVENT_START = 15
@@ -280,6 +262,8 @@ def async_get_hass_or_none() -> HomeAssistant | None:
class ReleaseChannel(enum.StrEnum):
"""Release channel."""
BETA = "beta"
DEV = "dev"
NIGHTLY = "nightly"
@@ -2883,11 +2867,3 @@ class ServiceRegistry:
if TYPE_CHECKING:
target = cast(Callable[..., ServiceResponse], target)
return await self._hass.async_add_executor_job(target, service_call)
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = functools.partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = functools.partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

View File

@@ -79,7 +79,7 @@
},
"aemet": {
"name": "AEMET OpenData",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -95,7 +95,7 @@
},
"aftership": {
"name": "AfterShip",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -119,7 +119,7 @@
},
"airnow": {
"name": "AirNow",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -151,7 +151,7 @@
"name": "Airthings"
},
"airthings_ble": {
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling",
"name": "Airthings BLE"
@@ -160,7 +160,7 @@
},
"airtouch4": {
"name": "AirTouch 4",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -276,7 +276,7 @@
},
"amberelectric": {
"name": "Amber Electric",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -323,7 +323,7 @@
},
"android_ip_webcam": {
"name": "Android IP Webcam",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -359,7 +359,7 @@
},
"anthemav": {
"name": "Anthem A/V Receivers",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},
@@ -505,7 +505,7 @@
},
"arcam_fmj": {
"name": "Arcam FMJ Receivers",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -569,7 +569,7 @@
},
"atag": {
"name": "Atag",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -608,7 +608,7 @@
}
},
"aurora": {
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -620,7 +620,7 @@
},
"aussie_broadband": {
"name": "Aussie Broadband",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -663,7 +663,7 @@
},
"baf": {
"name": "Big Ass Fans",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},
@@ -680,7 +680,7 @@
},
"balboa": {
"name": "Balboa Spa Client",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},
@@ -738,7 +738,7 @@
},
"blebox": {
"name": "BleBox devices",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -778,13 +778,13 @@
},
"bluemaestro": {
"name": "BlueMaestro",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},
"bluesound": {
"name": "Bluesound",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -865,7 +865,7 @@
},
"brottsplatskartan": {
"name": "Brottsplatskartan",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -928,7 +928,7 @@
},
"buienradar": {
"name": "Buienradar",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -939,7 +939,7 @@
},
"caldav": {
"name": "CalDAV",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -963,7 +963,7 @@
"iot_class": "local_polling"
},
"cert_expiry": {
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -1039,7 +1039,7 @@
},
"cloudflare": {
"name": "Cloudflare",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_push",
"single_config_entry": true
@@ -1063,7 +1063,7 @@
},
"coinbase": {
"name": "Coinbase",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -1194,7 +1194,7 @@
},
"daikin": {
"name": "Daikin AC",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -1206,7 +1206,7 @@
},
"datadog": {
"name": "Datadog",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "local_push"
},
@@ -1326,7 +1326,7 @@
},
"dexcom": {
"name": "Dexcom",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -1395,7 +1395,7 @@
},
"dnsip": {
"name": "DNS IP",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -1407,7 +1407,7 @@
},
"doorbird": {
"name": "DoorBird",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},
@@ -1448,7 +1448,7 @@
},
"droplet": {
"name": "Droplet",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},
@@ -1484,7 +1484,7 @@
},
"dunehd": {
"name": "Dune HD",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -1508,7 +1508,7 @@
},
"eafm": {
"name": "Environment Agency Flood Gauges",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling"
},
@@ -1551,7 +1551,7 @@
},
"ecoforest": {
"name": "Ecoforest",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_polling"
},
@@ -3982,19 +3982,19 @@
"name": "Microsoft",
"integrations": {
"azure_data_explorer": {
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_push",
"name": "Azure Data Explorer"
},
"azure_devops": {
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling",
"name": "Azure DevOps"
},
"azure_event_hub": {
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_push",
"name": "Azure Event Hub"

10
requirements_all.txt generated
View File

@@ -206,7 +206,7 @@ aioaquacell==0.2.0
aioaseko==1.0.0
# homeassistant.components.asuswrt
aioasuswrt==1.5.2
aioasuswrt==1.5.3
# homeassistant.components.husqvarna_automower
aioautomower==2.7.1
@@ -319,7 +319,7 @@ aiolookin==1.0.0
aiolyric==2.0.2
# homeassistant.components.mealie
aiomealie==1.1.0
aiomealie==1.1.1
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -2061,7 +2061,7 @@ pyforked-daapd==0.1.14
pyfreedompro==1.1.0
# homeassistant.components.fritzbox
pyfritzhome==0.6.17
pyfritzhome==0.6.18
# homeassistant.components.ifttt
pyfttt==0.3
@@ -2560,7 +2560,7 @@ python-opensky==1.0.1
python-otbr-api==2.7.0
# homeassistant.components.overseerr
python-overseerr==0.7.1
python-overseerr==0.8.0
# homeassistant.components.picnic
python-picnic-api2==1.3.1
@@ -2978,7 +2978,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
tesla-fleet-api==1.2.5
tesla-fleet-api==1.2.7
# homeassistant.components.powerwall
tesla-powerwall==0.5.2

View File

@@ -197,7 +197,7 @@ aioaquacell==0.2.0
aioaseko==1.0.0
# homeassistant.components.asuswrt
aioasuswrt==1.5.2
aioasuswrt==1.5.3
# homeassistant.components.husqvarna_automower
aioautomower==2.7.1
@@ -304,7 +304,7 @@ aiolookin==1.0.0
aiolyric==2.0.2
# homeassistant.components.mealie
aiomealie==1.1.0
aiomealie==1.1.1
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -1741,7 +1741,7 @@ pyforked-daapd==0.1.14
pyfreedompro==1.1.0
# homeassistant.components.fritzbox
pyfritzhome==0.6.17
pyfritzhome==0.6.18
# homeassistant.components.ifttt
pyfttt==0.3
@@ -2144,7 +2144,7 @@ python-opensky==1.0.1
python-otbr-api==2.7.0
# homeassistant.components.overseerr
python-overseerr==0.7.1
python-overseerr==0.8.0
# homeassistant.components.picnic
python-picnic-api2==1.3.1
@@ -2478,7 +2478,7 @@ temperusb==1.6.1
# homeassistant.components.tesla_fleet
# homeassistant.components.teslemetry
# homeassistant.components.tessie
tesla-fleet-api==1.2.5
tesla-fleet-api==1.2.7
# homeassistant.components.powerwall
tesla-powerwall==0.5.2

View File

@@ -55,6 +55,55 @@
'state': '22.0',
})
# ---
# name: test_sensors[sensor.test_thermostat_device_uptime-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_thermostat_device_uptime',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
'original_icon': None,
'original_name': 'Device uptime',
'platform': 'airobot',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'device_uptime',
'unique_id': 'T01A1B2C3_device_uptime',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[sensor.test_thermostat_device_uptime-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'timestamp',
'friendly_name': 'Test Thermostat Device uptime',
}),
'context': <ANY>,
'entity_id': 'sensor.test_thermostat_device_uptime',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '2023-12-31T21:13:20+00:00',
})
# ---
# name: test_sensors[sensor.test_thermostat_error_count-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -16,6 +16,7 @@ def platforms() -> list[Platform]:
return [Platform.SENSOR]
@pytest.mark.freeze_time("2024-01-01 00:00:00+00:00")
@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_sensors(
hass: HomeAssistant,

View File

@@ -25,6 +25,7 @@ async def setup_config_entry(
device: Mock | None = None,
fritz: Mock | None = None,
template: Mock | None = None,
trigger: Mock | None = None,
) -> MockConfigEntry:
"""Do setup of a MockConfigEntry."""
entry = MockConfigEntry(
@@ -39,6 +40,9 @@ async def setup_config_entry(
if template is not None and fritz is not None:
fritz().get_templates.return_value = [template]
if trigger is not None and fritz is not None:
fritz().get_triggers.return_value = [trigger]
await hass.config_entries.async_setup(entry.entry_id)
if device is not None:
await hass.async_block_till_done()
@@ -46,7 +50,10 @@ async def setup_config_entry(
def set_devices(
fritz: Mock, devices: list[Mock] | None = None, templates: list[Mock] | None = None
fritz: Mock,
devices: list[Mock] | None = None,
templates: list[Mock] | None = None,
triggers: list[Mock] | None = None,
) -> None:
"""Set list of devices or templates."""
if devices is not None:
@@ -55,6 +62,9 @@ def set_devices(
if templates is not None:
fritz().get_templates.return_value = templates
if triggers is not None:
fritz().get_triggers.return_value = triggers
class FritzEntityBaseMock(Mock):
"""base mock of a AVM Fritz!Box binary sensor device."""
@@ -199,3 +209,11 @@ class FritzDeviceCoverUnknownPositionMock(FritzDeviceCoverMock):
"""Mock of a AVM Fritz!Box cover device with unknown position."""
levelpercentage = None
class FritzTriggerMock(FritzEntityBaseMock):
"""Mock of a AVM Fritz!Box smarthome trigger."""
active = True
ain = "trg1234 56789"
name = "fake_trigger"

View File

@@ -47,3 +47,51 @@
'state': 'on',
})
# ---
# name: test_setup[switch.fake_trigger-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.fake_trigger',
'has_entity_name': False,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'fake_trigger',
'platform': 'fritzbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': 'trg1234 56789',
'unit_of_measurement': None,
})
# ---
# name: test_setup[switch.fake_trigger-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'fake_trigger',
}),
'context': <ANY>,
'entity_id': 'switch.fake_trigger',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---

View File

@@ -23,12 +23,13 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.util import dt as dt_util
from . import FritzDeviceSwitchMock, set_devices, setup_config_entry
from . import FritzDeviceSwitchMock, FritzTriggerMock, set_devices, setup_config_entry
from .const import CONF_FAKE_NAME, MOCK_CONFIG
from tests.common import async_fire_time_changed, snapshot_platform
ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}"
SWITCH_ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}"
TRIGGER_ENTITY_ID = f"{SWITCH_DOMAIN}.fake_trigger"
async def test_setup(
@@ -39,50 +40,56 @@ async def test_setup(
) -> None:
"""Test setup of platform."""
device = FritzDeviceSwitchMock()
trigger = FritzTriggerMock()
with patch("homeassistant.components.fritzbox.PLATFORMS", [Platform.SWITCH]):
entry = await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass,
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
device=device,
fritz=fritz,
trigger=trigger,
)
assert entry.state is ConfigEntryState.LOADED
await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None:
"""Test turn device on."""
async def test_switch_turn_on(hass: HomeAssistant, fritz: Mock) -> None:
"""Test turn switch device on."""
device = FritzDeviceSwitchMock()
await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
)
assert device.set_switch_state_on.call_count == 1
async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None:
"""Test turn device off."""
async def test_switch_turn_off(hass: HomeAssistant, fritz: Mock) -> None:
"""Test turn switch device off."""
device = FritzDeviceSwitchMock()
await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
)
assert device.set_switch_state_off.call_count == 1
async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None:
"""Test toggling while device is locked."""
async def test_switch_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None:
"""Test toggling while switch device is locked."""
device = FritzDeviceSwitchMock()
device.lock = True
await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
with pytest.raises(
@@ -90,7 +97,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None:
match="Can't toggle switch while manual switching is disabled for the device",
):
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
)
with pytest.raises(
@@ -98,17 +105,23 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None:
match="Can't toggle switch while manual switching is disabled for the device",
):
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, True
)
async def test_update(hass: HomeAssistant, fritz: Mock) -> None:
"""Test update without error."""
device = FritzDeviceSwitchMock()
trigger = FritzTriggerMock()
await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass,
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
device=device,
fritz=fritz,
trigger=trigger,
)
assert fritz().update_devices.call_count == 1
assert fritz().update_triggers.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
@@ -116,6 +129,7 @@ async def test_update(hass: HomeAssistant, fritz: Mock) -> None:
await hass.async_block_till_done(wait_background_tasks=True)
assert fritz().update_devices.call_count == 2
assert fritz().update_triggers.call_count == 2
assert fritz().login.call_count == 1
@@ -124,7 +138,7 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None:
device = FritzDeviceSwitchMock()
fritz().update_devices.side_effect = HTTPError("Boom")
entry = await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
assert entry.state is ConfigEntryState.SETUP_RETRY
assert fritz().update_devices.call_count == 2
@@ -145,10 +159,10 @@ async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock) -> No
device.energy = 0
device.power = 0
await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], device=device, fritz=fritz
)
state = hass.states.get(ENTITY_ID)
state = hass.states.get(SWITCH_ENTITY_ID)
assert state
assert state.state == STATE_UNAVAILABLE
@@ -156,13 +170,19 @@ async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock) -> No
async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:
"""Test adding new discovered devices during runtime."""
device = FritzDeviceSwitchMock()
trigger = FritzTriggerMock()
await setup_config_entry(
hass, MOCK_CONFIG[DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
hass,
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
device=device,
fritz=fritz,
trigger=trigger,
)
state = hass.states.get(ENTITY_ID)
assert state
assert hass.states.get(SWITCH_ENTITY_ID)
assert hass.states.get(TRIGGER_ENTITY_ID)
# add new switch device
new_device = FritzDeviceSwitchMock()
new_device.ain = "7890 1234"
new_device.name = "new_switch"
@@ -172,5 +192,48 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done(wait_background_tasks=True)
state = hass.states.get(f"{SWITCH_DOMAIN}.new_switch")
assert state
assert hass.states.get(f"{SWITCH_DOMAIN}.new_switch")
# add new trigger
new_trigger = FritzTriggerMock()
new_trigger.ain = "trg7890 1234"
new_trigger.name = "new_trigger"
set_devices(fritz, triggers=[trigger, new_trigger])
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done(wait_background_tasks=True)
assert hass.states.get(f"{SWITCH_DOMAIN}.new_trigger")
async def test_activate_trigger(hass: HomeAssistant, fritz: Mock) -> None:
"""Test activating a FRITZ! trigger."""
trigger = FritzTriggerMock()
await setup_config_entry(
hass,
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
fritz=fritz,
trigger=trigger,
)
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: TRIGGER_ENTITY_ID}, True
)
assert fritz().set_trigger_active.call_count == 1
async def test_deactivate_trigger(hass: HomeAssistant, fritz: Mock) -> None:
"""Test deactivating a FRITZ! trigger."""
trigger = FritzTriggerMock()
await setup_config_entry(
hass,
MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
fritz=fritz,
trigger=trigger,
)
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: TRIGGER_ENTITY_ID}, True
)
assert fritz().set_trigger_inactive.call_count == 1

View File

@@ -163,7 +163,7 @@
"code": "CMBrewByWeightDoses",
"index": 1,
"output": {
"scaleConnected": false,
"scaleConnected": true,
"availableModes": ["Continuous"],
"mode": "Continuous",
"doses": {

View File

@@ -1,4 +1,122 @@
# serializer version: 1
# name: test_brew_by_weight_dose[Linea Mini][entry-dose-1]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
'min': 5,
'mode': <NumberMode.AUTO: 'auto'>,
'step': 0.1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'number.lm012345_brew_by_weight_dose_1',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <NumberDeviceClass.WEIGHT: 'weight'>,
'original_icon': None,
'original_name': 'Brew by weight Dose 1',
'platform': 'lamarzocco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'bbw_dose',
'unique_id': 'LM012345_bbw_dose_1',
'unit_of_measurement': <UnitOfMass.GRAMS: 'g'>,
})
# ---
# name: test_brew_by_weight_dose[Linea Mini][entry-dose-2]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
'min': 5,
'mode': <NumberMode.AUTO: 'auto'>,
'step': 0.1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'number.lm012345_brew_by_weight_dose_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <NumberDeviceClass.WEIGHT: 'weight'>,
'original_icon': None,
'original_name': 'Brew by weight Dose 2',
'platform': 'lamarzocco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'bbw_dose',
'unique_id': 'LM012345_bbw_dose_2',
'unit_of_measurement': <UnitOfMass.GRAMS: 'g'>,
})
# ---
# name: test_brew_by_weight_dose[Linea Mini][state-dose-1]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'weight',
'friendly_name': 'LM012345 Brew by weight Dose 1',
'max': 100,
'min': 5,
'mode': <NumberMode.AUTO: 'auto'>,
'step': 0.1,
'unit_of_measurement': <UnitOfMass.GRAMS: 'g'>,
}),
'context': <ANY>,
'entity_id': 'number.lm012345_brew_by_weight_dose_1',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '34.5',
})
# ---
# name: test_brew_by_weight_dose[Linea Mini][state-dose-2]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'weight',
'friendly_name': 'LM012345 Brew by weight Dose 2',
'max': 100,
'min': 5,
'mode': <NumberMode.AUTO: 'auto'>,
'step': 0.1,
'unit_of_measurement': <UnitOfMass.GRAMS: 'g'>,
}),
'context': <ANY>,
'entity_id': 'number.lm012345_brew_by_weight_dose_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '17.5',
})
# ---
# name: test_general_numbers[coffee_target_temperature-94-set_coffee_target_temperature-kwargs0]
StateSnapshot({
'attributes': ReadOnlyDict({

View File

@@ -1,4 +1,63 @@
# serializer version: 1
# name: test_bbw_dose_mode[Linea Mini]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'LM012345 Brew by weight dose mode',
'options': list([
'continuous',
'dose1',
'dose2',
]),
}),
'context': <ANY>,
'entity_id': 'select.lm012345_brew_by_weight_dose_mode',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'continuous',
})
# ---
# name: test_bbw_dose_mode[Linea Mini].1
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'continuous',
'dose1',
'dose2',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'select.lm012345_brew_by_weight_dose_mode',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Brew by weight dose mode',
'platform': 'lamarzocco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'bbw_dose_mode',
'unique_id': 'LM012345_bbw_dose_mode',
'unit_of_measurement': None,
})
# ---
# name: test_pre_brew_infusion_select[GS3 AV]
StateSnapshot({
'attributes': ReadOnlyDict({

View File

@@ -4,6 +4,7 @@ from typing import Any
from unittest.mock import MagicMock
from pylamarzocco.const import (
DoseMode,
ModelName,
PreExtractionMode,
SmartStandByType,
@@ -27,6 +28,11 @@ from . import async_init_integration
from tests.common import MockConfigEntry
DOSE_MODE_HA_TO_LM = {
"dose1": DoseMode.DOSE_1,
"dose2": DoseMode.DOSE_2,
}
@pytest.mark.parametrize(
("entity_name", "value", "func_name", "kwargs"),
@@ -291,3 +297,45 @@ async def test_steam_temperature(
mock_lamarzocco.set_steam_target_temperature.assert_called_once_with(
temperature=128.3,
)
@pytest.mark.parametrize("device_fixture", [ModelName.LINEA_MINI])
async def test_brew_by_weight_dose(
hass: HomeAssistant,
mock_lamarzocco: MagicMock,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test brew by weight dose."""
await async_init_integration(hass, mock_config_entry)
serial_number = mock_lamarzocco.serial_number
for dose in (1, 2):
entity_id = f"number.{serial_number}_brew_by_weight_dose_{dose}"
state = hass.states.get(entity_id)
assert state
assert state == snapshot(name=f"state-dose-{dose}")
entry = entity_registry.async_get(state.entity_id)
assert entry
assert entry.device_id
assert entry == snapshot(name=f"entry-dose-{dose}")
# service call
await hass.services.async_call(
NUMBER_DOMAIN,
SERVICE_SET_VALUE,
{
ATTR_ENTITY_ID: entity_id,
ATTR_VALUE: 42,
},
blocking=True,
)
mock_lamarzocco.set_brew_by_weight_dose.assert_called_with(
dose=DOSE_MODE_HA_TO_LM[f"dose{dose}"],
value=42,
)

View File

@@ -3,6 +3,7 @@
from unittest.mock import MagicMock
from pylamarzocco.const import (
DoseMode,
ModelName,
PreExtractionMode,
SmartStandByType,
@@ -193,3 +194,40 @@ async def test_select_errors(
blocking=True,
)
assert exc_info.value.translation_key == "select_option_error"
@pytest.mark.usefixtures("init_integration")
@pytest.mark.parametrize("device_fixture", [ModelName.LINEA_MINI])
async def test_bbw_dose_mode(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
mock_lamarzocco: MagicMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test the La Marzocco Brew By Weight Mode Select (only for Mini R Models)."""
serial_number = mock_lamarzocco.serial_number
state = hass.states.get(f"select.{serial_number}_brew_by_weight_dose_mode")
assert state
assert state == snapshot
entry = entity_registry.async_get(state.entity_id)
assert entry
assert entry == snapshot
# on/off service calls
await hass.services.async_call(
SELECT_DOMAIN,
SERVICE_SELECT_OPTION,
{
ATTR_ENTITY_ID: f"select.{serial_number}_brew_by_weight_dose_mode",
ATTR_OPTION: "dose2",
},
blocking=True,
)
mock_lamarzocco.set_brew_by_weight_dose_mode.assert_called_once_with(
mode=DoseMode.DOSE_2
)

View File

@@ -110,7 +110,7 @@
},
{
"date": "2024-01-21",
"entryType": "lunch",
"entryType": "dessert",
"title": "",
"text": "",
"recipeId": "27455eb2-31d3-4682-84ff-02a114bf293a",
@@ -178,7 +178,7 @@
},
{
"date": "2024-01-21",
"entryType": "dinner",
"entryType": "snack",
"title": "",
"text": "",
"recipeId": "48f39d27-4b8e-4c14-bf36-4e1e6497e75e",
@@ -218,7 +218,7 @@
},
{
"date": "2024-01-21",
"entryType": "dinner",
"entryType": "drink",
"title": "",
"text": "",
"recipeId": "27455eb2-31d3-4682-84ff-02a114bf293a",

View File

@@ -256,7 +256,7 @@
},
{
"date": "2024-01-23",
"entryType": "dinner",
"entryType": "dessert",
"title": "",
"text": "",
"recipeId": "47595e4c-52bc-441d-b273-3edf4258806d",
@@ -500,7 +500,7 @@
},
{
"date": "2024-01-22",
"entryType": "dinner",
"entryType": "drink",
"title": "",
"text": "",
"recipeId": "9d553779-607e-471b-acf3-84e6be27b159",
@@ -574,7 +574,7 @@
},
{
"date": "2024-01-22",
"entryType": "dinner",
"entryType": "snack",
"title": "",
"text": "",
"recipeId": "55c88810-4cf1-4d86-ae50-63b15fd173fb",

Some files were not shown because too many files have changed in this diff Show More