Compare commits

..

40 Commits

Author SHA1 Message Date
farmio
9d979a1d7f review; mark disabled by default 2025-12-03 20:09:09 +01:00
farmio
0a1214256a Add counter for KNX DataSecure undecodable telegrams 2025-12-03 14:52:04 +01:00
Artur Pragacz
1a60c46d67 Bump aioonkyo to 0.4.0 (#157838) 2025-12-03 14:46:52 +01:00
Matthias Alphart
62fba5ca20 Update xknx to 3.12.0 (#157835) 2025-12-03 14:40:40 +01:00
victorigualada
b54cde795c Bump hass-nabucasa from 1.6.2 to 1.7.0 (#157834) 2025-12-03 14:37:45 +01:00
victorigualada
0f456373bf Allow non strict response_format structures for Cloud LLM generation (#157822) 2025-12-03 14:31:09 +01:00
IAmStiven
a5042027b8 Add support for new ElevenLabs model Scribe v2 (#156961) 2025-12-03 14:29:25 +01:00
Franck Nijhof
b15b5ba95c Add final learn more and feedback links for purpose-specific triggers and conditions preview feature (#157830) 2025-12-03 13:14:37 +01:00
Robert Resch
cd6e72798e Prioritize default stun port over alternative (#157829) 2025-12-03 13:14:28 +01:00
Kamil Breguła
739157e59f Simplify availability property in WLED (#157800)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-12-03 13:00:21 +01:00
torben-iometer
267aa1af42 bump iometer to v0.3.0 (#157826) 2025-12-03 12:47:05 +01:00
Michael
7328b61a69 Add integration_type to Oralb (#157828) 2025-12-03 12:46:50 +01:00
Allen Porter
203f2fb364 Bump google-nest-sdm to 9.1.2 (#157812)
Co-authored-by: Josef Zweck <josef@zweck.dev>
Co-authored-by: Robert Resch <robert@resch.dev>
2025-12-03 11:23:00 +01:00
Josef Zweck
b956c17ce4 Mark nordpool as service integration_type (#157810) 2025-12-03 11:22:42 +01:00
Marc Mueller
5163dc0567 Fix ping TypeError when killing the process (#157794) 2025-12-03 11:22:14 +01:00
Allen Porter
31a0478717 Bump python-roborock to 3.9.2 (#157815)
Co-authored-by: Robert Resch <robert@resch.dev>
2025-12-03 10:56:56 +01:00
dependabot[bot]
24da3f0db8 Bump actions/checkout from 6.0.0 to 6.0.1 (#157806)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-03 10:38:45 +01:00
dependabot[bot]
786922fc5d Bump actions/stale from 10.1.0 to 10.1.1 (#157807)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-03 10:36:44 +01:00
Erik Montnemery
c2f8b6986b Pin Python point release used in CI (#157819) 2025-12-03 10:26:15 +01:00
hanwg
0a0832671f Fix bug in group notify entities when title is missing (#157171)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-12-03 09:44:01 +01:00
Aidan Timson
7b353d7ad4 Add levoit virtual integration (#157618) 2025-12-03 09:38:01 +01:00
epenet
99de73a729 Update SFR Box unit of measurement (#157813) 2025-12-03 08:46:59 +01:00
Joost Lekkerkerker
1995fbd252 Make occupancy trigger check occupancy instead of presence (#157791) 2025-12-03 08:15:31 +01:00
Kamil Breguła
315ea9dc76 Update release URL in WLED (#157801) 2025-12-03 05:55:03 +01:00
Josef Zweck
639a96f8cb La Marzocco add Bluetooth offline mode (#157011) 2025-12-03 05:53:27 +01:00
Stefan Agner
b6786c5a42 Add storage link to low disk space repair issue (#157786)
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
2025-12-02 22:14:24 -05:00
Kamil Breguła
6f6e9b8057 Add quality scale for WLED (#155482)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-12-03 01:19:02 +01:00
johanzander
e0c687e415 Remove extra logging in Growatt (#157788) 2025-12-03 00:38:03 +01:00
Abestanis
982362110c Allow to configure KNX time, date & datetime entities via UI (#157603) 2025-12-02 23:45:43 +01:00
Lukas
90dc3a8fdf Pooldose: add number platform (#157787) 2025-12-02 23:31:44 +01:00
J. Nick Koston
5112742b71 Bump habluetooth to 5.8.0 (#157771) 2025-12-02 15:55:37 -06:00
johanzander
8899bc01bd Add bronze quality scale to Growatt Server integration (#154649)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-02 22:36:51 +01:00
Joris Pelgröm
ed8f9105ff Bump letpot to 0.6.4 (#157781) 2025-12-02 22:12:41 +01:00
Michael Hansen
185de98f5e Bump hassil to 3.5.0 (#157780) 2025-12-02 22:11:00 +01:00
Paulus Schoutsen
e857abb43f Allow fetching the Cloud ICE servers (#157774) 2025-12-02 16:02:30 -05:00
Joost Lekkerkerker
5b1829f3a1 Add hood filter usage entity to SmartThings (#157775) 2025-12-02 21:45:17 +01:00
Kamil Breguła
520156a33a Handle unsupported version in WLED (#157778)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-12-02 21:20:36 +01:00
Kevin Stillhammer
e3b5342b76 use sentence casing in binary_sensor for fressnapf_tracker (#157772) 2025-12-02 21:06:18 +01:00
Josef Zweck
951b19e80c Add integration_type for tedee (#157776) 2025-12-02 21:04:51 +01:00
Sab44
e2351ecec2 Fix orphaned devices not being removed during integration startup (#155900) 2025-12-02 21:03:37 +01:00
130 changed files with 3666 additions and 1686 deletions

View File

@@ -30,7 +30,7 @@ jobs:
architectures: ${{ env.ARCHITECTURES }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -96,7 +96,7 @@ jobs:
os: ubuntu-24.04-arm
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -273,7 +273,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set build additional args
run: |
@@ -311,7 +311,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -464,7 +464,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -509,7 +509,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0

View File

@@ -41,8 +41,8 @@ env:
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
DEFAULT_PYTHON: "3.13.9"
ALL_PYTHON_VERSIONS: "['3.13.9', '3.14.0']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support
@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |

View File

@@ -21,7 +21,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6

View File

@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0

View File

@@ -31,7 +31,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.4.0"]
"requirements": ["hassil==3.5.0"]
}

View File

@@ -8,6 +8,8 @@
"integration_type": "system",
"preview_features": {
"new_triggers_conditions": {
"feedback_url": "https://forms.gle/fWFZqf5MzuwWTsCH8",
"learn_more_url": "https://www.home-assistant.io/blog/2025/12/03/release-202512/#purpose-specific-triggers-and-conditions",
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation"
}
},

View File

@@ -1,6 +1,6 @@
{
"common": {
"trigger_behavior_description_presence": "The behavior of the targeted presence sensors to trigger on.",
"trigger_behavior_description_occupancy": "The behavior of the targeted occupancy sensors to trigger on.",
"trigger_behavior_name": "Behavior"
},
"device_automation": {
@@ -336,17 +336,17 @@
"description": "Triggers after one or more occupancy sensors stop detecting occupancy.",
"fields": {
"behavior": {
"description": "[%key:component::binary_sensor::common::trigger_behavior_description_presence%]",
"description": "[%key:component::binary_sensor::common::trigger_behavior_description_occupancy%]",
"name": "[%key:component::binary_sensor::common::trigger_behavior_name%]"
}
},
"name": "Occupancy cleared"
},
"occupancy_detected": {
"description": "Triggers after one ore more occupancy sensors start detecting occupancy.",
"description": "Triggers after one or more occupancy sensors start detecting occupancy.",
"fields": {
"behavior": {
"description": "[%key:component::binary_sensor::common::trigger_behavior_description_presence%]",
"description": "[%key:component::binary_sensor::common::trigger_behavior_description_occupancy%]",
"name": "[%key:component::binary_sensor::common::trigger_behavior_name%]"
}
},

View File

@@ -15,11 +15,11 @@ occupancy_cleared:
target:
entity:
domain: binary_sensor
device_class: presence
device_class: occupancy
occupancy_detected:
fields: *trigger_common_fields
target:
entity:
domain: binary_sensor
device_class: presence
device_class: occupancy

View File

@@ -21,6 +21,6 @@
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==3.1.2",
"habluetooth==5.7.0"
"habluetooth==5.8.0"
]
}

View File

@@ -407,8 +407,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return [
RTCIceServer(
urls=[
"stun:stun.home-assistant.io:80",
"stun:stun.home-assistant.io:3478",
"stun:stun.home-assistant.io:80",
]
),
]

View File

@@ -71,6 +71,7 @@ class CloudClient(Interface):
self._google_config_init_lock = asyncio.Lock()
self._relayer_region: str | None = None
self._cloud_ice_servers_listener: Callable[[], None] | None = None
self._ice_servers: list[RTCIceServer] = []
@property
def base_path(self) -> Path:
@@ -117,6 +118,11 @@ class CloudClient(Interface):
"""Return the connected relayer region."""
return self._relayer_region
@property
def ice_servers(self) -> list[RTCIceServer]:
"""Return the current ICE servers."""
return self._ice_servers
async def get_alexa_config(self) -> alexa_config.CloudAlexaConfig:
"""Return Alexa config."""
if self._alexa_config is None:
@@ -203,11 +209,8 @@ class CloudClient(Interface):
ice_servers: list[RTCIceServer],
) -> Callable[[], None]:
"""Register cloud ice server."""
def get_ice_servers() -> list[RTCIceServer]:
return ice_servers
return async_register_ice_servers(self._hass, get_ice_servers)
self._ice_servers = ice_servers
return async_register_ice_servers(self._hass, lambda: self._ice_servers)
async def async_register_cloud_ice_servers_listener(
prefs: CloudPreferences,
@@ -268,6 +271,7 @@ class CloudClient(Interface):
async def logout_cleanups(self) -> None:
"""Cleanup some stuff after logout."""
self._ice_servers = []
await self.prefs.async_set_username(None)
if self._alexa_config:

View File

@@ -561,7 +561,7 @@ class BaseCloudLLMEntity(Entity):
"schema": _format_structured_output(
structure, chat_log.llm_api
),
"strict": True,
"strict": False,
},
}

View File

@@ -99,6 +99,7 @@ def async_setup(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, websocket_hook_delete)
websocket_api.async_register_command(hass, websocket_remote_connect)
websocket_api.async_register_command(hass, websocket_remote_disconnect)
websocket_api.async_register_command(hass, websocket_webrtc_ice_servers)
websocket_api.async_register_command(hass, google_assistant_get)
websocket_api.async_register_command(hass, google_assistant_list)
@@ -1107,6 +1108,7 @@ async def alexa_sync(
@websocket_api.websocket_command({"type": "cloud/tts/info"})
@callback
def tts_info(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
@@ -1134,3 +1136,22 @@ def tts_info(
)
connection.send_result(msg["id"], {"languages": result})
@websocket_api.websocket_command(
{
vol.Required("type"): "cloud/webrtc/ice_servers",
}
)
@_require_cloud_login
@callback
def websocket_webrtc_ice_servers(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Handle get WebRTC ICE servers websocket command."""
connection.send_result(
msg["id"],
[server.to_dict() for server in hass.data[DATA_CLOUD].client.ice_servers],
)

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.6.2"],
"requirements": ["hass-nabucasa==1.7.0"],
"single_config_entry": true
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.12.2"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2025.12.2"]
}

View File

@@ -17,7 +17,7 @@ DEFAULT_TTS_MODEL = "eleven_multilingual_v2"
DEFAULT_STABILITY = 0.5
DEFAULT_SIMILARITY = 0.75
DEFAULT_STT_AUTO_LANGUAGE = False
DEFAULT_STT_MODEL = "scribe_v1"
DEFAULT_STT_MODEL = "scribe_v2"
DEFAULT_STYLE = 0
DEFAULT_USE_SPEAKER_BOOST = True
@@ -129,4 +129,5 @@ STT_LANGUAGES = [
STT_MODELS = {
"scribe_v1": "Scribe v1",
"scribe_v1_experimental": "Scribe v1 Experimental",
"scribe_v2": "Scribe v2 Realtime",
}

View File

@@ -49,7 +49,7 @@
"entity": {
"binary_sensor": {
"deep_sleep": {
"name": "Deep Sleep"
"name": "Deep sleep"
}
}
}

View File

@@ -18,10 +18,12 @@ from homeassistant.components.notify import (
SERVICE_SEND_MESSAGE,
BaseNotificationService,
NotifyEntity,
NotifyEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_ACTION,
CONF_ENTITIES,
CONF_SERVICE,
@@ -173,14 +175,23 @@ class NotifyGroup(GroupEntity, NotifyEntity):
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a message to all members of the group."""
data = {
ATTR_MESSAGE: message,
ATTR_ENTITY_ID: self._entity_ids,
}
# add title only if supported and provided
if (
title is not None
and self._attr_supported_features & NotifyEntityFeature.TITLE
):
data[ATTR_TITLE] = title
await self.hass.services.async_call(
NOTIFY_DOMAIN,
SERVICE_SEND_MESSAGE,
{
ATTR_MESSAGE: message,
ATTR_TITLE: title,
ATTR_ENTITY_ID: self._entity_ids,
},
data,
blocking=True,
context=self._context,
)
@@ -194,3 +205,15 @@ class NotifyGroup(GroupEntity, NotifyEntity):
for entity_id in self._entity_ids
if (state := self.hass.states.get(entity_id)) is not None
)
# Support title if all members support it
self._attr_supported_features |= NotifyEntityFeature.TITLE
for entity_id in self._entity_ids:
state = self.hass.states.get(entity_id)
if (
state is None
or not state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
& NotifyEntityFeature.TITLE
):
self._attr_supported_features &= ~NotifyEntityFeature.TITLE
break

View File

@@ -37,7 +37,6 @@ def get_device_list_classic(
login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
# DEBUG: Log the actual response structure
except Exception as ex:
_LOGGER.error("DEBUG - Login response: %s", login_response)
raise ConfigEntryError(
f"Error communicating with Growatt API during login: {ex}"
) from ex

View File

@@ -113,9 +113,6 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
min_settings = self.api.min_settings(self.device_id)
min_energy = self.api.min_energy(self.device_id)
except growattServer.GrowattV1ApiError as err:
_LOGGER.error(
"Error fetching min device data for %s: %s", self.device_id, err
)
raise UpdateFailed(f"Error fetching min device data: {err}") from err
min_info = {**min_details, **min_settings, **min_energy}
@@ -180,7 +177,6 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
try:
return await self.hass.async_add_executor_job(self._sync_update_data)
except json.decoder.JSONDecodeError as err:
_LOGGER.error("Unable to fetch data from Growatt server: %s", err)
raise UpdateFailed(f"Error fetching data: {err}") from err
def get_currency(self):

View File

@@ -0,0 +1,74 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow:
status: todo
comment: data-descriptions missing
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: todo
comment: Update server URL dropdown to show regional descriptions (e.g., 'China', 'United States') instead of raw URLs.
docs-installation-parameters: todo
entity-unavailable:
status: todo
comment: Replace bare Exception catches in __init__.py with specific growattServer exceptions.
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: todo
test-coverage: todo
# Gold
devices:
status: todo
comment: Add serial_number field to DeviceInfo in sensor, number, and switch platforms using device_id/serial_id.
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category:
status: todo
comment: Add EntityCategory.DIAGNOSTIC to temperature and other diagnostic sensors. Merge GrowattRequiredKeysMixin into GrowattSensorEntityDescription using kw_only=True.
entity-device-class:
status: todo
comment: Replace custom precision field with suggested_display_precision to preserve full data granularity.
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: Integration does not raise repairable issues.
stale-devices: todo
# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: todo

View File

@@ -161,6 +161,7 @@ EXTRA_PLACEHOLDERS = {
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: HELP_URLS,
ISSUE_KEY_SYSTEM_FREE_SPACE: {
"more_info_free_space": "https://www.home-assistant.io/more-info/free-space",
"storage_url": "/config/storage",
},
ISSUE_KEY_ADDON_PWNED: {
"more_info_pwned": "https://www.home-assistant.io/more-info/pwned-passwords",

View File

@@ -130,7 +130,7 @@
"title": "Restart(s) required"
},
"issue_system_free_space": {
"description": "The data disk has only {free_space}GB free space left. This may cause issues with system stability and interfere with functionality such as backups and updates. See [clear up storage]({more_info_free_space}) for tips on how to free up space.",
"description": "The data disk has only {free_space}GB free space left. This may cause issues with system stability and interfere with functionality such as backups and updates. Go to [storage]({storage_url}) to see what is taking up space or see [clear up storage]({more_info_free_space}) for tips on how to free up space.",
"title": "Data disk is running low on free space"
},
"issue_system_multiple_data_disks": {

View File

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["iometer==0.2.0"],
"requirements": ["iometer==0.3.0"],
"zeroconf": ["_iometer._tcp.local."]
}

View File

@@ -162,8 +162,11 @@ SUPPORTED_PLATFORMS_UI: Final = {
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.COVER,
Platform.DATE,
Platform.DATETIME,
Platform.LIGHT,
Platform.SWITCH,
Platform.TIME,
}
# Map KNX controller modes to HA modes. This list might not be complete.

View File

@@ -3,8 +3,8 @@
from __future__ import annotations
from datetime import date as dt_date
from typing import Any
from xknx import XKNX
from xknx.devices import DateDevice as XknxDateDevice
from xknx.dpt.dpt_11 import KNXDate as XKNXDate
@@ -18,7 +18,10 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
@@ -26,11 +29,14 @@ from .const import (
CONF_RESPOND_TO_READ,
CONF_STATE_ADDRESS,
CONF_SYNC_STATE,
DOMAIN,
KNX_ADDRESS,
KNX_MODULE_KEY,
)
from .entity import KnxYamlEntity
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
from .knx_module import KNXModule
from .storage.const import CONF_ENTITY, CONF_GA_DATE
from .storage.util import ConfigExtractor
async def async_setup_entry(
@@ -40,40 +46,36 @@ async def async_setup_entry(
) -> None:
"""Set up entities for KNX platform."""
knx_module = hass.data[KNX_MODULE_KEY]
config: list[ConfigType] = knx_module.config_yaml[Platform.DATE]
async_add_entities(
KNXDateEntity(knx_module, entity_config) for entity_config in config
platform = async_get_current_platform()
knx_module.config_store.add_platform(
platform=Platform.DATE,
controller=KnxUiEntityPlatformController(
knx_module=knx_module,
entity_platform=platform,
entity_class=KnxUiDate,
),
)
def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice:
"""Return a XKNX DateTime object to be used within XKNX."""
return XknxDateDevice(
xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
entities: list[KnxYamlEntity | KnxUiEntity] = []
if yaml_platform_config := knx_module.config_yaml.get(Platform.DATE):
entities.extend(
KnxYamlDate(knx_module, entity_config)
for entity_config in yaml_platform_config
)
if ui_config := knx_module.config_store.data["entities"].get(Platform.DATE):
entities.extend(
KnxUiDate(knx_module, unique_id, config)
for unique_id, config in ui_config.items()
)
if entities:
async_add_entities(entities)
class KNXDateEntity(KnxYamlEntity, DateEntity, RestoreEntity):
class _KNXDate(DateEntity, RestoreEntity):
"""Representation of a KNX date."""
_device: XknxDateDevice
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX time."""
super().__init__(
knx_module=knx_module,
device=_create_xknx_device(knx_module.xknx, config),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_added_to_hass(self) -> None:
"""Restore last state."""
await super().async_added_to_hass()
@@ -94,3 +96,52 @@ class KNXDateEntity(KnxYamlEntity, DateEntity, RestoreEntity):
async def async_set_value(self, value: dt_date) -> None:
"""Change the value."""
await self._device.set(value)
class KnxYamlDate(_KNXDate, KnxYamlEntity):
"""Representation of a KNX date configured from YAML."""
_device: XknxDateDevice
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX date."""
super().__init__(
knx_module=knx_module,
device=XknxDateDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiDate(_KNXDate, KnxUiEntity):
"""Representation of a KNX date configured from the UI."""
_device: XknxDateDevice
def __init__(
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
) -> None:
"""Initialize KNX date."""
super().__init__(
knx_module=knx_module,
unique_id=unique_id,
entity_config=config[CONF_ENTITY],
)
knx_conf = ConfigExtractor(config[DOMAIN])
self._device = XknxDateDevice(
knx_module.xknx,
name=config[CONF_ENTITY][CONF_NAME],
localtime=False,
group_address=knx_conf.get_write(CONF_GA_DATE),
group_address_state=knx_conf.get_state_and_passive(CONF_GA_DATE),
respond_to_read=knx_conf.get(CONF_RESPOND_TO_READ),
sync_state=knx_conf.get(CONF_SYNC_STATE),
)

View File

@@ -3,8 +3,8 @@
from __future__ import annotations
from datetime import datetime
from typing import Any
from xknx import XKNX
from xknx.devices import DateTimeDevice as XknxDateTimeDevice
from xknx.dpt.dpt_19 import KNXDateTime as XKNXDateTime
@@ -18,7 +18,10 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
@@ -27,11 +30,14 @@ from .const import (
CONF_RESPOND_TO_READ,
CONF_STATE_ADDRESS,
CONF_SYNC_STATE,
DOMAIN,
KNX_ADDRESS,
KNX_MODULE_KEY,
)
from .entity import KnxYamlEntity
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
from .knx_module import KNXModule
from .storage.const import CONF_ENTITY, CONF_GA_DATETIME
from .storage.util import ConfigExtractor
async def async_setup_entry(
@@ -41,40 +47,36 @@ async def async_setup_entry(
) -> None:
"""Set up entities for KNX platform."""
knx_module = hass.data[KNX_MODULE_KEY]
config: list[ConfigType] = knx_module.config_yaml[Platform.DATETIME]
async_add_entities(
KNXDateTimeEntity(knx_module, entity_config) for entity_config in config
platform = async_get_current_platform()
knx_module.config_store.add_platform(
platform=Platform.DATETIME,
controller=KnxUiEntityPlatformController(
knx_module=knx_module,
entity_platform=platform,
entity_class=KnxUiDateTime,
),
)
def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTimeDevice:
"""Return a XKNX DateTime object to be used within XKNX."""
return XknxDateTimeDevice(
xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
entities: list[KnxYamlEntity | KnxUiEntity] = []
if yaml_platform_config := knx_module.config_yaml.get(Platform.DATETIME):
entities.extend(
KnxYamlDateTime(knx_module, entity_config)
for entity_config in yaml_platform_config
)
if ui_config := knx_module.config_store.data["entities"].get(Platform.DATETIME):
entities.extend(
KnxUiDateTime(knx_module, unique_id, config)
for unique_id, config in ui_config.items()
)
if entities:
async_add_entities(entities)
class KNXDateTimeEntity(KnxYamlEntity, DateTimeEntity, RestoreEntity):
class _KNXDateTime(DateTimeEntity, RestoreEntity):
"""Representation of a KNX datetime."""
_device: XknxDateTimeDevice
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX time."""
super().__init__(
knx_module=knx_module,
device=_create_xknx_device(knx_module.xknx, config),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_added_to_hass(self) -> None:
"""Restore last state."""
await super().async_added_to_hass()
@@ -99,3 +101,52 @@ class KNXDateTimeEntity(KnxYamlEntity, DateTimeEntity, RestoreEntity):
async def async_set_value(self, value: datetime) -> None:
"""Change the value."""
await self._device.set(value.astimezone(dt_util.get_default_time_zone()))
class KnxYamlDateTime(_KNXDateTime, KnxYamlEntity):
"""Representation of a KNX datetime configured from YAML."""
_device: XknxDateTimeDevice
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX datetime."""
super().__init__(
knx_module=knx_module,
device=XknxDateTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiDateTime(_KNXDateTime, KnxUiEntity):
"""Representation of a KNX datetime configured from the UI."""
_device: XknxDateTimeDevice
def __init__(
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
) -> None:
"""Initialize KNX datetime."""
super().__init__(
knx_module=knx_module,
unique_id=unique_id,
entity_config=config[CONF_ENTITY],
)
knx_conf = ConfigExtractor(config[DOMAIN])
self._device = XknxDateTimeDevice(
knx_module.xknx,
name=config[CONF_ENTITY][CONF_NAME],
localtime=False,
group_address=knx_conf.get_write(CONF_GA_DATETIME),
group_address_state=knx_conf.get_state_and_passive(CONF_GA_DATETIME),
respond_to_read=knx_conf.get(CONF_RESPOND_TO_READ),
sync_state=knx_conf.get(CONF_SYNC_STATE),
)

View File

@@ -21,6 +21,9 @@
"telegram_count": {
"default": "mdi:plus-network"
},
"telegrams_data_secure_undecodable": {
"default": "mdi:lock-alert"
},
"telegrams_incoming": {
"default": "mdi:upload-network"
},

View File

@@ -11,7 +11,7 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "silver",
"requirements": [
"xknx==3.11.0",
"xknx==3.12.0",
"xknxproject==3.8.2",
"knx-frontend==2025.10.31.195356"
],

View File

@@ -108,6 +108,12 @@ SYSTEM_ENTITY_DESCRIPTIONS = (
+ knx.xknx.connection_manager.cemi_count_incoming
+ knx.xknx.connection_manager.cemi_count_incoming_error,
),
KNXSystemEntityDescription(
key="telegrams_data_secure_undecodable",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
value_fn=lambda knx: knx.xknx.connection_manager.undecoded_data_secure,
),
)

View File

@@ -13,6 +13,9 @@ CONF_DPT: Final = "dpt"
CONF_GA_SENSOR: Final = "ga_sensor"
CONF_GA_SWITCH: Final = "ga_switch"
CONF_GA_DATE: Final = "ga_date"
CONF_GA_DATETIME: Final = "ga_datetime"
CONF_GA_TIME: Final = "ga_time"
# Climate
CONF_GA_TEMPERATURE_CURRENT: Final = "ga_temperature_current"

View File

@@ -46,6 +46,8 @@ from .const import (
CONF_GA_COLOR_TEMP,
CONF_GA_CONTROLLER_MODE,
CONF_GA_CONTROLLER_STATUS,
CONF_GA_DATE,
CONF_GA_DATETIME,
CONF_GA_FAN_SPEED,
CONF_GA_FAN_SWING,
CONF_GA_FAN_SWING_HORIZONTAL,
@@ -72,6 +74,7 @@ from .const import (
CONF_GA_SWITCH,
CONF_GA_TEMPERATURE_CURRENT,
CONF_GA_TEMPERATURE_TARGET,
CONF_GA_TIME,
CONF_GA_UP_DOWN,
CONF_GA_VALVE,
CONF_GA_WHITE_BRIGHTNESS,
@@ -199,6 +202,24 @@ COVER_KNX_SCHEMA = AllSerializeFirst(
),
)
# UI entity-store schema for the KNX date platform (DPT 11.001 date).
DATE_KNX_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_GA_DATE): GASelector(write_required=True, valid_dpt="11.001"),
        vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
        vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
    }
)
# UI entity-store schema for the KNX datetime platform (DPT 19.001 date time).
DATETIME_KNX_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_GA_DATETIME): GASelector(
            write_required=True, valid_dpt="19.001"
        ),
        vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
        vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
    }
)
@unique
class LightColorMode(StrEnum):
@@ -336,6 +357,14 @@ SWITCH_KNX_SCHEMA = vol.Schema(
},
)
# UI entity-store schema for the KNX time platform (DPT 10.001 time of day).
TIME_KNX_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_GA_TIME): GASelector(write_required=True, valid_dpt="10.001"),
        vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
        vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
    }
)
@unique
class ConfSetpointShiftMode(StrEnum):
@@ -482,8 +511,11 @@ KNX_SCHEMA_FOR_PLATFORM = {
Platform.BINARY_SENSOR: BINARY_SENSOR_KNX_SCHEMA,
Platform.CLIMATE: CLIMATE_KNX_SCHEMA,
Platform.COVER: COVER_KNX_SCHEMA,
Platform.DATE: DATE_KNX_SCHEMA,
Platform.DATETIME: DATETIME_KNX_SCHEMA,
Platform.LIGHT: LIGHT_KNX_SCHEMA,
Platform.SWITCH: SWITCH_KNX_SCHEMA,
Platform.TIME: TIME_KNX_SCHEMA,
}
ENTITY_STORE_DATA_SCHEMA: VolSchemaType = vol.All(

View File

@@ -176,6 +176,10 @@
"state_address": "State address",
"valid_dpts": "Valid DPTs"
},
"respond_to_read": {
"description": "Respond to GroupValueRead telegrams received to the configured send address.",
"label": "Respond to read"
},
"sync_state": {
"description": "Actively request state updates from KNX bus for state addresses.",
"options": {
@@ -438,6 +442,24 @@
}
}
},
"date": {
"description": "The KNX date platform is used as an interface to date objects.",
"knx": {
"ga_date": {
"description": "The group address of the date object.",
"label": "Date"
}
}
},
"datetime": {
"description": "The KNX datetime platform is used as an interface to date and time objects.",
"knx": {
"ga_datetime": {
"description": "The group address of the date and time object.",
"label": "Date and time"
}
}
},
"header": "Create new entity",
"light": {
"description": "The KNX light platform is used as an interface to dimming actuators, LED controllers, DALI gateways and similar.",
@@ -546,10 +568,15 @@
"invert": {
"description": "Invert payloads before processing or sending.",
"label": "Invert"
},
"respond_to_read": {
"description": "Respond to GroupValueRead telegrams received to the configured send address.",
"label": "Respond to read"
}
}
},
"time": {
"description": "The KNX time platform is used as an interface to time objects.",
"knx": {
"ga_time": {
"description": "The group address of the time object.",
"label": "Time"
}
}
},
@@ -612,6 +639,10 @@
"name": "Telegrams",
"unit_of_measurement": "telegrams"
},
"telegrams_data_secure_undecodable": {
"name": "Undecodable Data Secure telegrams",
"unit_of_measurement": "[%key:component::knx::entity::sensor::telegrams_incoming_error::unit_of_measurement%]"
},
"telegrams_incoming": {
"name": "Incoming telegrams",
"unit_of_measurement": "[%key:component::knx::entity::sensor::telegram_count::unit_of_measurement%]"

View File

@@ -3,8 +3,8 @@
from __future__ import annotations
from datetime import time as dt_time
from typing import Any
from xknx import XKNX
from xknx.devices import TimeDevice as XknxTimeDevice
from xknx.dpt.dpt_10 import KNXTime as XknxTime
@@ -18,7 +18,10 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
@@ -26,11 +29,14 @@ from .const import (
CONF_RESPOND_TO_READ,
CONF_STATE_ADDRESS,
CONF_SYNC_STATE,
DOMAIN,
KNX_ADDRESS,
KNX_MODULE_KEY,
)
from .entity import KnxYamlEntity
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
from .knx_module import KNXModule
from .storage.const import CONF_ENTITY, CONF_GA_TIME
from .storage.util import ConfigExtractor
async def async_setup_entry(
@@ -40,40 +46,36 @@ async def async_setup_entry(
) -> None:
"""Set up entities for KNX platform."""
knx_module = hass.data[KNX_MODULE_KEY]
config: list[ConfigType] = knx_module.config_yaml[Platform.TIME]
async_add_entities(
KNXTimeEntity(knx_module, entity_config) for entity_config in config
platform = async_get_current_platform()
knx_module.config_store.add_platform(
platform=Platform.TIME,
controller=KnxUiEntityPlatformController(
knx_module=knx_module,
entity_platform=platform,
entity_class=KnxUiTime,
),
)
def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice:
    """Return an XknxTimeDevice built from a YAML time entity config."""
    return XknxTimeDevice(
        xknx,
        name=config[CONF_NAME],
        localtime=False,
        group_address=config[KNX_ADDRESS],
        group_address_state=config.get(CONF_STATE_ADDRESS),
        respond_to_read=config[CONF_RESPOND_TO_READ],
        sync_state=config[CONF_SYNC_STATE],
    )
entities: list[KnxYamlEntity | KnxUiEntity] = []
if yaml_platform_config := knx_module.config_yaml.get(Platform.TIME):
entities.extend(
KnxYamlTime(knx_module, entity_config)
for entity_config in yaml_platform_config
)
if ui_config := knx_module.config_store.data["entities"].get(Platform.TIME):
entities.extend(
KnxUiTime(knx_module, unique_id, config)
for unique_id, config in ui_config.items()
)
if entities:
async_add_entities(entities)
class KNXTimeEntity(KnxYamlEntity, TimeEntity, RestoreEntity):
class _KNXTime(TimeEntity, RestoreEntity):
"""Representation of a KNX time."""
_device: XknxTimeDevice
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX time."""
super().__init__(
knx_module=knx_module,
device=_create_xknx_device(knx_module.xknx, config),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_added_to_hass(self) -> None:
"""Restore last state."""
await super().async_added_to_hass()
@@ -94,3 +96,52 @@ class KNXTimeEntity(KnxYamlEntity, TimeEntity, RestoreEntity):
async def async_set_value(self, value: dt_time) -> None:
"""Change the value."""
await self._device.set(value)
class KnxYamlTime(_KNXTime, KnxYamlEntity):
    """KNX time entity created from a YAML configuration entry."""

    _device: XknxTimeDevice

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Set up the entity and its backing xknx device from YAML config."""
        device = XknxTimeDevice(
            knx_module.xknx,
            name=config[CONF_NAME],
            localtime=False,
            group_address=config[KNX_ADDRESS],
            group_address_state=config.get(CONF_STATE_ADDRESS),
            respond_to_read=config[CONF_RESPOND_TO_READ],
            sync_state=config[CONF_SYNC_STATE],
        )
        super().__init__(knx_module=knx_module, device=device)
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        # YAML entities derive their unique id from the write group address.
        self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiTime(_KNXTime, KnxUiEntity):
    """KNX time entity created from the UI entity store."""

    _device: XknxTimeDevice

    def __init__(
        self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
    ) -> None:
        """Set up the entity and its backing xknx device from stored UI config."""
        entity_config = config[CONF_ENTITY]
        super().__init__(
            knx_module=knx_module,
            unique_id=unique_id,
            entity_config=entity_config,
        )
        # Group addresses for UI entities live in the "knx" section of the store.
        knx_config = ConfigExtractor(config[DOMAIN])
        self._device = XknxTimeDevice(
            knx_module.xknx,
            name=entity_config[CONF_NAME],
            localtime=False,
            group_address=knx_config.get_write(CONF_GA_TIME),
            group_address_state=knx_config.get_state_and_passive(CONF_GA_TIME),
            respond_to_read=knx_config.get(CONF_RESPOND_TO_READ),
            sync_state=knx_config.get(CONF_SYNC_STATE),
        )

View File

@@ -35,6 +35,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import CONF_INSTALLATION_KEY, CONF_USE_BLUETOOTH, DOMAIN
from .coordinator import (
LaMarzoccoBluetoothUpdateCoordinator,
LaMarzoccoConfigEntry,
LaMarzoccoConfigUpdateCoordinator,
LaMarzoccoRuntimeData,
@@ -72,38 +73,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
client=create_client_session(hass),
)
try:
settings = await cloud_client.get_thing_settings(serial)
except AuthFail as ex:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="authentication_failed"
) from ex
except (RequestNotSuccessful, TimeoutError) as ex:
_LOGGER.debug(ex, exc_info=True)
raise ConfigEntryNotReady(
translation_domain=DOMAIN, translation_key="api_error"
) from ex
gateway_version = version.parse(
settings.firmwares[FirmwareType.GATEWAY].build_version
)
if gateway_version < version.parse("v5.0.9"):
# incompatible gateway firmware, create an issue
ir.async_create_issue(
hass,
DOMAIN,
"unsupported_gateway_firmware",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
translation_key="unsupported_gateway_firmware",
translation_placeholders={"gateway_version": str(gateway_version)},
)
# initialize Bluetooth
bluetooth_client: LaMarzoccoBluetoothClient | None = None
if entry.options.get(CONF_USE_BLUETOOTH, True) and (
token := (entry.data.get(CONF_TOKEN) or settings.ble_auth_token)
token := entry.data.get(CONF_TOKEN)
):
if CONF_MAC not in entry.data:
for discovery_info in async_discovered_service_info(hass):
@@ -145,6 +118,44 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
_LOGGER.info(
"Bluetooth device not found during lamarzocco setup, continuing with cloud only"
)
try:
settings = await cloud_client.get_thing_settings(serial)
except AuthFail as ex:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="authentication_failed"
) from ex
except (RequestNotSuccessful, TimeoutError) as ex:
_LOGGER.debug(ex, exc_info=True)
if not bluetooth_client:
raise ConfigEntryNotReady(
translation_domain=DOMAIN, translation_key="api_error"
) from ex
_LOGGER.debug("Cloud failed, continuing with Bluetooth only", exc_info=True)
else:
gateway_version = version.parse(
settings.firmwares[FirmwareType.GATEWAY].build_version
)
if gateway_version < version.parse("v5.0.9"):
# incompatible gateway firmware, create an issue
ir.async_create_issue(
hass,
DOMAIN,
"unsupported_gateway_firmware",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
translation_key="unsupported_gateway_firmware",
translation_placeholders={"gateway_version": str(gateway_version)},
)
# Update BLE Token if exists
if settings.ble_auth_token:
hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_TOKEN: settings.ble_auth_token,
},
)
device = LaMarzoccoMachine(
serial_number=entry.unique_id,
@@ -153,7 +164,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
)
coordinators = LaMarzoccoRuntimeData(
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
@@ -166,6 +177,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
coordinators.statistics_coordinator.async_config_entry_first_refresh(),
)
# bt coordinator only if bluetooth client is available
# and after the initial refresh of the config coordinator
# to fetch only if the others failed
if bluetooth_client:
bluetooth_coordinator = LaMarzoccoBluetoothUpdateCoordinator(
hass, entry, device
)
await bluetooth_coordinator.async_config_entry_first_refresh()
coordinators.bluetooth_coordinator = bluetooth_coordinator
entry.runtime_data = coordinators
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

View File

@@ -6,7 +6,7 @@ from typing import cast
from pylamarzocco import LaMarzoccoMachine
from pylamarzocco.const import BackFlushStatus, MachineState, ModelName, WidgetType
from pylamarzocco.models import BackFlush, MachineStatus
from pylamarzocco.models import BackFlush, MachineStatus, NoWater
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -39,8 +39,15 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
key="water_tank",
translation_key="water_tank",
device_class=BinarySensorDeviceClass.PROBLEM,
is_on_fn=lambda machine: WidgetType.CM_NO_WATER in machine.dashboard.config,
is_on_fn=(
lambda machine: cast(
NoWater, machine.dashboard.config[WidgetType.CM_NO_WATER]
).allarm
if WidgetType.CM_NO_WATER in machine.dashboard.config
else False
),
entity_category=EntityCategory.DIAGNOSTIC,
bt_offline_mode=True,
),
LaMarzoccoBinarySensorEntityDescription(
key="brew_active",
@@ -93,7 +100,9 @@ async def async_setup_entry(
coordinator = entry.runtime_data.config_coordinator
async_add_entities(
LaMarzoccoBinarySensorEntity(coordinator, description)
LaMarzoccoBinarySensorEntity(
coordinator, description, entry.runtime_data.bluetooth_coordinator
)
for description in ENTITIES
if description.supported_fn(coordinator)
)

View File

@@ -10,8 +10,12 @@ from datetime import timedelta
import logging
from typing import Any
from pylamarzocco import LaMarzoccoCloudClient, LaMarzoccoMachine
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
from pylamarzocco import LaMarzoccoMachine
from pylamarzocco.exceptions import (
AuthFail,
BluetoothConnectionFailed,
RequestNotSuccessful,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
@@ -36,6 +40,7 @@ class LaMarzoccoRuntimeData:
settings_coordinator: LaMarzoccoSettingsUpdateCoordinator
schedule_coordinator: LaMarzoccoScheduleUpdateCoordinator
statistics_coordinator: LaMarzoccoStatisticsUpdateCoordinator
bluetooth_coordinator: LaMarzoccoBluetoothUpdateCoordinator | None = None
type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoRuntimeData]
@@ -46,14 +51,13 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
_default_update_interval = SCAN_INTERVAL
config_entry: LaMarzoccoConfigEntry
_websocket_task: Task | None = None
update_success = False
def __init__(
self,
hass: HomeAssistant,
entry: LaMarzoccoConfigEntry,
device: LaMarzoccoMachine,
cloud_client: LaMarzoccoCloudClient | None = None,
) -> None:
"""Initialize coordinator."""
super().__init__(
@@ -64,7 +68,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
update_interval=self._default_update_interval,
)
self.device = device
self.cloud_client = cloud_client
self._websocket_task: Task | None = None
@property
def websocket_terminated(self) -> bool:
@@ -81,14 +85,28 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
await func()
except AuthFail as ex:
_LOGGER.debug("Authentication failed", exc_info=True)
self.update_success = False
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="authentication_failed"
) from ex
except RequestNotSuccessful as ex:
_LOGGER.debug(ex, exc_info=True)
self.update_success = False
# if no bluetooth coordinator, this is a fatal error
# otherwise, bluetooth may still work
if not self.device.bluetooth_client_available:
raise UpdateFailed(
translation_domain=DOMAIN, translation_key="api_error"
) from ex
except BluetoothConnectionFailed as err:
self.update_success = False
raise UpdateFailed(
translation_domain=DOMAIN, translation_key="api_error"
) from ex
translation_domain=DOMAIN,
translation_key="bluetooth_connection_failed",
) from err
else:
self.update_success = True
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
async def _async_setup(self) -> None:
"""Set up coordinator."""
@@ -109,11 +127,9 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
"""Class to handle fetching data from the La Marzocco API centrally."""
cloud_client: LaMarzoccoCloudClient
async def _internal_async_setup(self) -> None:
"""Set up the coordinator."""
await self.cloud_client.async_get_access_token()
await self.device.ensure_token_valid()
await self.device.get_dashboard()
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
@@ -121,7 +137,7 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
"""Fetch data from API endpoint."""
# ensure token stays valid; does nothing if token is still valid
await self.cloud_client.async_get_access_token()
await self.device.ensure_token_valid()
# Only skip websocket reconnection if it's currently connected and the task is still running
if self.device.websocket.connected and not self.websocket_terminated:
@@ -193,3 +209,19 @@ class LaMarzoccoStatisticsUpdateCoordinator(LaMarzoccoUpdateCoordinator):
"""Fetch data from API endpoint."""
await self.device.get_coffee_and_flush_counter()
_LOGGER.debug("Current statistics: %s", self.device.statistics.to_dict())
class LaMarzoccoBluetoothUpdateCoordinator(LaMarzoccoUpdateCoordinator):
    """Coordinator that fetches La Marzocco machine data over Bluetooth."""

    async def _internal_async_setup(self) -> None:
        """Fetch the model info once via Bluetooth during coordinator setup."""
        await self.device.get_model_info_from_bluetooth()

    async def _internal_async_update_data(self) -> None:
        """Refresh the dashboard via Bluetooth unless cloud push is active."""
        device = self.device
        # While the websocket is connected and the machine is cloud connected,
        # state arrives as push updates, so a Bluetooth poll would be redundant.
        cloud_push_active = device.websocket.connected and device.dashboard.connected
        if not cloud_push_active:
            await device.get_dashboard_from_bluetooth()

View File

@@ -17,7 +17,10 @@ from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import LaMarzoccoUpdateCoordinator
from .coordinator import (
LaMarzoccoBluetoothUpdateCoordinator,
LaMarzoccoUpdateCoordinator,
)
@dataclass(frozen=True, kw_only=True)
@@ -26,6 +29,7 @@ class LaMarzoccoEntityDescription(EntityDescription):
available_fn: Callable[[LaMarzoccoUpdateCoordinator], bool] = lambda _: True
supported_fn: Callable[[LaMarzoccoUpdateCoordinator], bool] = lambda _: True
bt_offline_mode: bool = False
class LaMarzoccoBaseEntity(
@@ -45,14 +49,19 @@ class LaMarzoccoBaseEntity(
super().__init__(coordinator)
device = coordinator.device
self._attr_unique_id = f"{device.serial_number}_{key}"
sw_version = (
device.settings.firmwares[FirmwareType.MACHINE].build_version
if FirmwareType.MACHINE in device.settings.firmwares
else None
)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.serial_number)},
name=device.dashboard.name,
name=device.dashboard.name or self.coordinator.config_entry.title,
manufacturer="La Marzocco",
model=device.dashboard.model_name.value,
model_id=device.dashboard.model_code.value,
serial_number=device.serial_number,
sw_version=device.settings.firmwares[FirmwareType.MACHINE].build_version,
sw_version=sw_version,
)
connections: set[tuple[str, str]] = set()
if coordinator.config_entry.data.get(CONF_ADDRESS):
@@ -77,8 +86,12 @@ class LaMarzoccoBaseEntity(
if WidgetType.CM_MACHINE_STATUS in self.coordinator.device.dashboard.config
else MachineState.OFF
)
return super().available and not (
self._unavailable_when_machine_off and machine_state is MachineState.OFF
return (
super().available
and not (
self._unavailable_when_machine_off and machine_state is MachineState.OFF
)
and self.coordinator.update_success
)
@@ -90,6 +103,11 @@ class LaMarzoccoEntity(LaMarzoccoBaseEntity):
@property
def available(self) -> bool:
"""Return True if entity is available."""
if (
self.entity_description.bt_offline_mode
and self.bluetooth_coordinator is not None
):
return self.bluetooth_coordinator.last_update_success
if super().available:
return self.entity_description.available_fn(self.coordinator)
return False
@@ -98,7 +116,17 @@ class LaMarzoccoEntity(LaMarzoccoBaseEntity):
self,
coordinator: LaMarzoccoUpdateCoordinator,
entity_description: LaMarzoccoEntityDescription,
bluetooth_coordinator: LaMarzoccoBluetoothUpdateCoordinator | None = None,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator, entity_description.key)
self.entity_description = entity_description
self.bluetooth_coordinator = bluetooth_coordinator
async def async_added_to_hass(self) -> None:
"""Handle when entity is added to hass."""
await super().async_added_to_hass()
if self.bluetooth_coordinator is not None:
self.async_on_remove(
self.bluetooth_coordinator.async_add_listener(self.async_write_ha_state)
)

View File

@@ -58,6 +58,7 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
CoffeeBoiler, machine.dashboard.config[WidgetType.CM_COFFEE_BOILER]
).target_temperature
),
bt_offline_mode=True,
),
LaMarzoccoNumberEntityDescription(
key="steam_temp",
@@ -78,6 +79,7 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.GS3_AV, ModelName.GS3_MP)
),
bt_offline_mode=True,
),
LaMarzoccoNumberEntityDescription(
key="smart_standby_time",
@@ -96,6 +98,7 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
)
),
native_value_fn=lambda machine: machine.schedule.smart_wake_up_sleep.smart_stand_by_minutes,
bt_offline_mode=True,
),
LaMarzoccoNumberEntityDescription(
key="preinfusion_off",
@@ -226,13 +229,14 @@ async def async_setup_entry(
) -> None:
"""Set up number entities."""
coordinator = entry.runtime_data.config_coordinator
entities: list[NumberEntity] = [
LaMarzoccoNumberEntity(coordinator, description)
async_add_entities(
LaMarzoccoNumberEntity(
coordinator, description, entry.runtime_data.bluetooth_coordinator
)
for description in ENTITIES
if description.supported_fn(coordinator)
]
async_add_entities(entities)
)
class LaMarzoccoNumberEntity(LaMarzoccoEntity, NumberEntity):

View File

@@ -80,6 +80,7 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI_R, ModelName.LINEA_MICRA)
),
bt_offline_mode=True,
),
LaMarzoccoSelectEntityDescription(
key="prebrew_infusion_select",
@@ -128,7 +129,9 @@ async def async_setup_entry(
coordinator = entry.runtime_data.config_coordinator
async_add_entities(
LaMarzoccoSelectEntity(coordinator, description)
LaMarzoccoSelectEntity(
coordinator, description, entry.runtime_data.bluetooth_coordinator
)
for description in ENTITIES
if description.supported_fn(coordinator)
)

View File

@@ -183,6 +183,9 @@
"auto_on_off_error": {
"message": "Error while setting auto on/off to {state} for {id}"
},
"bluetooth_connection_failed": {
"message": "Error while connecting to machine via Bluetooth"
},
"button_error": {
"message": "Error while executing button {key}"
},

View File

@@ -50,6 +50,7 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
).mode
is MachineMode.BREWING_MODE
),
bt_offline_mode=True,
),
LaMarzoccoSwitchEntityDescription(
key="steam_boiler_enable",
@@ -65,6 +66,7 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI_R, ModelName.LINEA_MICRA)
),
bt_offline_mode=True,
),
LaMarzoccoSwitchEntityDescription(
key="steam_boiler_enable",
@@ -80,6 +82,7 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
lambda coordinator: coordinator.device.dashboard.model_name
not in (ModelName.LINEA_MINI_R, ModelName.LINEA_MICRA)
),
bt_offline_mode=True,
),
LaMarzoccoSwitchEntityDescription(
key="smart_standby_enabled",
@@ -91,6 +94,7 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
minutes=machine.schedule.smart_wake_up_sleep.smart_stand_by_minutes,
),
is_on_fn=lambda machine: machine.schedule.smart_wake_up_sleep.smart_stand_by_enabled,
bt_offline_mode=True,
),
)
@@ -106,7 +110,9 @@ async def async_setup_entry(
entities: list[SwitchEntity] = []
entities.extend(
LaMarzoccoSwitchEntity(coordinator, description)
LaMarzoccoSwitchEntity(
coordinator, description, entry.runtime_data.bluetooth_coordinator
)
for description in ENTITIES
if description.supported_fn(coordinator)
)

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["letpot"],
"quality_scale": "silver",
"requirements": ["letpot==0.6.3"]
"requirements": ["letpot==0.6.4"]
}

View File

@@ -0,0 +1 @@
"""Virtual integration: Levoit."""

View File

@@ -0,0 +1,6 @@
{
"domain": "levoit",
"name": "Levoit",
"integration_type": "virtual",
"supported_by": "vesync"
}

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from datetime import timedelta
import logging
from types import MappingProxyType
from librehardwaremonitor_api import (
LibreHardwareMonitorClient,
@@ -55,15 +54,11 @@ class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitor
device_entries: list[DeviceEntry] = dr.async_entries_for_config_entry(
registry=dr.async_get(self.hass), config_entry_id=config_entry.entry_id
)
self._previous_devices: MappingProxyType[DeviceId, DeviceName] = (
MappingProxyType(
{
DeviceId(next(iter(device.identifiers))[1]): DeviceName(device.name)
for device in device_entries
if device.identifiers and device.name
}
)
)
self._previous_devices: dict[DeviceId, DeviceName] = {
DeviceId(next(iter(device.identifiers))[1]): DeviceName(device.name)
for device in device_entries
if device.identifiers and device.name
}
async def _async_update_data(self) -> LibreHardwareMonitorData:
try:
@@ -75,7 +70,9 @@ class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitor
except LibreHardwareMonitorNoDevicesError as err:
raise UpdateFailed("No sensor data available, will retry") from err
await self._async_handle_changes_in_devices(lhm_data.main_device_ids_and_names)
await self._async_handle_changes_in_devices(
dict(lhm_data.main_device_ids_and_names)
)
return lhm_data
@@ -92,18 +89,21 @@ class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitor
)
async def _async_handle_changes_in_devices(
self, detected_devices: MappingProxyType[DeviceId, DeviceName]
self, detected_devices: dict[DeviceId, DeviceName]
) -> None:
"""Handle device changes by deleting devices from / adding devices to Home Assistant."""
detected_devices = {
DeviceId(f"{self.config_entry.entry_id}_{detected_id}"): device_name
for detected_id, device_name in detected_devices.items()
}
previous_device_ids = set(self._previous_devices.keys())
detected_device_ids = set(detected_devices.keys())
if previous_device_ids == detected_device_ids:
return
_LOGGER.debug("Previous device_ids: %s", previous_device_ids)
_LOGGER.debug("Detected device_ids: %s", detected_device_ids)
if self.data is None:
# initial update during integration startup
self._previous_devices = detected_devices # type: ignore[unreachable]
if previous_device_ids == detected_device_ids:
return
if orphaned_devices := previous_device_ids - detected_device_ids:
@@ -114,13 +114,21 @@ class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitor
device_registry = dr.async_get(self.hass)
for device_id in orphaned_devices:
if device := device_registry.async_get_device(
identifiers={(DOMAIN, f"{self.config_entry.entry_id}_{device_id}")}
identifiers={(DOMAIN, device_id)}
):
_LOGGER.debug(
"Removing device: %s", self._previous_devices[device_id]
)
device_registry.async_update_device(
device_id=device.id,
remove_config_entry_id=self.config_entry.entry_id,
)
if self.data is None:
# initial update during integration startup
self._previous_devices = detected_devices # type: ignore[unreachable]
return
if new_devices := detected_device_ids - previous_device_ids:
_LOGGER.warning(
"New Device(s) detected, reload integration to add them to Home Assistant: %s",

View File

@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/nest",
"iot_class": "cloud_push",
"loggers": ["google_nest_sdm"],
"requirements": ["google-nest-sdm==9.1.1"]
"requirements": ["google-nest-sdm==9.1.2"]
}

View File

@@ -4,7 +4,7 @@
"codeowners": ["@gjohansson-ST"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nordpool",
"integration_type": "hub",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pynordpool"],
"quality_scale": "platinum",

View File

@@ -9,7 +9,7 @@
"iot_class": "local_push",
"loggers": ["aioonkyo"],
"quality_scale": "bronze",
"requirements": ["aioonkyo==0.3.0"],
"requirements": ["aioonkyo==0.4.0"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

View File

@@ -10,6 +10,7 @@
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/oralb",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["oralb_ble"],
"requirements": ["oralb-ble==0.17.6"]

View File

@@ -162,7 +162,7 @@ class PingDataSubProcess(PingData):
if pinger:
with suppress(TypeError, ProcessLookupError):
await pinger.kill() # type: ignore[func-returns-value]
pinger.kill()
del pinger
return None

View File

@@ -17,7 +17,12 @@ from .coordinator import PooldoseConfigEntry, PooldoseCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.NUMBER,
Platform.SENSOR,
Platform.SWITCH,
]
async def async_migrate_entry(hass: HomeAssistant, entry: PooldoseConfigEntry) -> bool:

View File

@@ -68,6 +68,35 @@
}
}
},
"number": {
"cl_target": {
"default": "mdi:pool"
},
"ofa_cl_lower": {
"default": "mdi:arrow-down-bold"
},
"ofa_cl_upper": {
"default": "mdi:arrow-up-bold"
},
"ofa_orp_lower": {
"default": "mdi:arrow-down-bold"
},
"ofa_orp_upper": {
"default": "mdi:arrow-up-bold"
},
"ofa_ph_lower": {
"default": "mdi:arrow-down-bold"
},
"ofa_ph_upper": {
"default": "mdi:arrow-up-bold"
},
"orp_target": {
"default": "mdi:water-check"
},
"ph_target": {
"default": "mdi:ph"
}
},
"sensor": {
"cl": {
"default": "mdi:pool"

View File

@@ -0,0 +1,142 @@
"""Number entities for the Seko PoolDose integration."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, cast
from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
)
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
EntityCategory,
UnitOfElectricPotential,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PooldoseConfigEntry
from .entity import PooldoseEntity
if TYPE_CHECKING:
from .coordinator import PooldoseCoordinator
_LOGGER = logging.getLogger(__name__)
# Descriptions for every number entity this platform can create.  An entity is
# only instantiated when the coordinator actually reports data under the
# matching key (filtered in async_setup_entry), so unsupported devices simply
# get fewer entities.  All entries are CONFIG-category setpoints or
# overfeed-alarm (OFA) limits.
NUMBER_DESCRIPTIONS: tuple[NumberEntityDescription, ...] = (
    # pH dosing target setpoint.
    NumberEntityDescription(
        key="ph_target",
        translation_key="ph_target",
        entity_category=EntityCategory.CONFIG,
        device_class=NumberDeviceClass.PH,
    ),
    # ORP (redox potential) target, reported in millivolts.
    NumberEntityDescription(
        key="orp_target",
        translation_key="orp_target",
        entity_category=EntityCategory.CONFIG,
        device_class=NumberDeviceClass.VOLTAGE,
        native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
    ),
    # Chlorine target concentration in ppm (no dedicated device class exists).
    NumberEntityDescription(
        key="cl_target",
        translation_key="cl_target",
        entity_category=EntityCategory.CONFIG,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
    ),
    # Overfeed-alarm window for pH: lower and upper bounds.
    NumberEntityDescription(
        key="ofa_ph_lower",
        translation_key="ofa_ph_lower",
        entity_category=EntityCategory.CONFIG,
        device_class=NumberDeviceClass.PH,
    ),
    NumberEntityDescription(
        key="ofa_ph_upper",
        translation_key="ofa_ph_upper",
        entity_category=EntityCategory.CONFIG,
        device_class=NumberDeviceClass.PH,
    ),
    # Overfeed-alarm window for ORP (millivolts): lower and upper bounds.
    NumberEntityDescription(
        key="ofa_orp_lower",
        translation_key="ofa_orp_lower",
        entity_category=EntityCategory.CONFIG,
        device_class=NumberDeviceClass.VOLTAGE,
        native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
    ),
    NumberEntityDescription(
        key="ofa_orp_upper",
        translation_key="ofa_orp_upper",
        entity_category=EntityCategory.CONFIG,
        device_class=NumberDeviceClass.VOLTAGE,
        native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
    ),
    # Overfeed-alarm window for chlorine (ppm): lower and upper bounds.
    NumberEntityDescription(
        key="ofa_cl_lower",
        translation_key="ofa_cl_lower",
        entity_category=EntityCategory.CONFIG,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
    ),
    NumberEntityDescription(
        key="ofa_cl_upper",
        translation_key="ofa_cl_upper",
        entity_category=EntityCategory.CONFIG,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: PooldoseConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up PoolDose number entities from a config entry."""
    if TYPE_CHECKING:
        assert config_entry.unique_id is not None
    coordinator = config_entry.runtime_data
    serial = config_entry.unique_id
    device_info = coordinator.device_info
    # Only create entities for keys the device actually reports.
    reported = coordinator.data.get("number", {})
    entities = [
        PooldoseNumber(coordinator, serial, device_info, description)
        for description in NUMBER_DESCRIPTIONS
        if description.key in reported
    ]
    async_add_entities(entities)
class PooldoseNumber(PooldoseEntity, NumberEntity):
    """Number entity backed by the Seko PoolDose Python API."""

    def __init__(
        self,
        coordinator: PooldoseCoordinator,
        serial_number: str,
        device_info: Any,
        description: NumberEntityDescription,
    ) -> None:
        """Initialize the number entity and seed its attributes."""
        super().__init__(coordinator, serial_number, device_info, description, "number")
        self._async_update_attrs()

    def _handle_coordinator_update(self) -> None:
        """Refresh attributes when the coordinator pushes new data."""
        self._async_update_attrs()
        super()._handle_coordinator_update()

    def _async_update_attrs(self) -> None:
        """Pull value and limits for this key from the coordinator data."""
        entity_data = cast(dict, self.get_data())
        self._attr_native_min_value = entity_data["min"]
        self._attr_native_max_value = entity_data["max"]
        self._attr_native_step = entity_data["step"]
        self._attr_native_value = entity_data["value"]

    async def async_set_native_value(self, value: float) -> None:
        """Write a new value to the device and update state optimistically."""
        await self.coordinator.client.set_number(self.entity_description.key, value)
        self._attr_native_value = value
        self.async_write_ha_state()

View File

@@ -68,6 +68,35 @@
"name": "Auxiliary relay 3 status"
}
},
"number": {
"cl_target": {
"name": "Chlorine target"
},
"ofa_cl_lower": {
"name": "Chlorine overfeed alarm lower limit"
},
"ofa_cl_upper": {
"name": "Chlorine overfeed alarm upper limit"
},
"ofa_orp_lower": {
"name": "ORP overfeed alarm lower limit"
},
"ofa_orp_upper": {
"name": "ORP overfeed alarm upper limit"
},
"ofa_ph_lower": {
"name": "pH overfeed alarm lower limit"
},
"ofa_ph_upper": {
"name": "pH overfeed alarm upper limit"
},
"orp_target": {
"name": "ORP target"
},
"ph_target": {
"name": "pH target"
}
},
"sensor": {
"cl": {
"name": "Chlorine"

View File

@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==3.8.4",
"python-roborock==3.9.2",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -1,6 +1,5 @@
"""Roborock storage."""
import dataclasses
import logging
from pathlib import Path
import shutil
@@ -17,7 +16,7 @@ _LOGGER = logging.getLogger(__name__)
STORAGE_PATH = f".storage/{DOMAIN}"
MAPS_PATH = "maps"
CACHE_VERSION = 1
CACHE_VERSION = 2
def _storage_path_prefix(hass: HomeAssistant, entry_id: str) -> Path:
@@ -44,6 +43,31 @@ async def async_cleanup_map_storage(hass: HomeAssistant, entry_id: str) -> None:
await hass.async_add_executor_job(remove, path_prefix)
class StoreImpl(Store[dict[str, Any]]):
    """Store subclass for the Roborock cache with migration support."""

    def __init__(self, hass: HomeAssistant, entry_id: str) -> None:
        """Set up the backing store for a single config entry."""
        store_key = f"{DOMAIN}/{entry_id}"
        super().__init__(hass, version=CACHE_VERSION, key=store_key, private=True)

    async def _async_migrate_func(
        self,
        old_major_version: int,
        old_minor_version: int,
        old_data: dict[str, Any],
    ) -> dict[str, Any]:
        """Wipe out old caches with the old format."""
        if old_major_version != 1:
            return old_data
        # Version 1 was never in any stable release, so discard it outright
        # instead of migrating.
        return {}
class CacheStore(Cache):
"""Store and retrieve cache for a Roborock device.
@@ -55,19 +79,14 @@ class CacheStore(Cache):
def __init__(self, hass: HomeAssistant, entry_id: str) -> None:
"""Initialize CacheStore."""
self._cache_store = Store[dict[str, Any]](
hass,
version=CACHE_VERSION,
key=f"{DOMAIN}/{entry_id}",
private=True,
)
self._cache_store = StoreImpl(hass, entry_id)
self._cache_data: CacheData | None = None
async def get(self) -> CacheData:
"""Retrieve cached metadata."""
if self._cache_data is None:
if data := await self._cache_store.async_load():
self._cache_data = CacheData(**data)
self._cache_data = CacheData.from_dict(data)
else:
self._cache_data = CacheData()
@@ -80,7 +99,7 @@ class CacheStore(Cache):
async def flush(self) -> None:
"""Flush cached metadata to disk."""
if self._cache_data is not None:
await self._cache_store.async_save(dataclasses.asdict(self._cache_data))
await self._cache_store.async_save(self._cache_data.as_dict())
async def async_remove(self) -> None:
"""Remove cached metadata from disk."""

View File

@@ -60,7 +60,7 @@
},
"dsl_connect_count": {
"name": "DSL connect count",
"unit_of_measurement": "connects"
"unit_of_measurement": "attempts"
},
"dsl_crc_error_count": {
"name": "DSL CRC error count",

View File

@@ -1157,6 +1157,17 @@ CAPABILITY_TO_SENSORS: dict[
)
]
},
Capability.SAMSUNG_CE_HOOD_FILTER: {
Attribute.HOOD_FILTER_USAGE: [
SmartThingsSensorEntityDescription(
key=Attribute.HOOD_FILTER_USAGE,
translation_key="hood_filter_usage",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
)
]
},
}

View File

@@ -411,6 +411,9 @@
"simmer": "Simmer"
}
},
"hood_filter_usage": {
"name": "Filter usage"
},
"infrared_level": {
"name": "Infrared level"
},

View File

@@ -5,6 +5,7 @@
"config_flow": true,
"dependencies": ["http", "webhook"],
"documentation": "https://www.home-assistant.io/integrations/tedee",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["aiotedee"],
"quality_scale": "platinum",

View File

@@ -1,30 +1,20 @@
"""Support for Tibber."""
from __future__ import annotations
from dataclasses import dataclass
import logging
import aiohttp
from aiohttp.client_exceptions import ClientError, ClientResponseError
import tibber
from tibber import data_api as tibber_data_api
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ssl as ssl_util
from .const import AUTH_IMPLEMENTATION, DATA_HASS_CONFIG, DOMAIN
from .const import DATA_HASS_CONFIG, DOMAIN
from .services import async_setup_services
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
@@ -34,34 +24,6 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
_LOGGER = logging.getLogger(__name__)
@dataclass(slots=True)
class TibberRuntimeData:
    """Runtime data for Tibber API entries.

    Bundles the GraphQL connection with the optional OAuth2 session and a
    lazily created Data API client that is reused across calls.
    """

    # Always-present GraphQL connection created during entry setup.
    tibber_connection: tibber.Tibber
    # OAuth2 session for the Data API; None when OAuth is not configured.
    session: OAuth2Session | None = None
    # Cached Data API client, created on first async_get_client call.
    _client: tibber_data_api.TibberDataAPI | None = None

    async def async_get_client(
        self, hass: HomeAssistant
    ) -> tibber_data_api.TibberDataAPI:
        """Return an authenticated Tibber Data API client.

        Raises ConfigEntryAuthFailed when no OAuth session exists or the
        refreshed token lacks an access token.
        """
        if self.session is None:
            raise ConfigEntryAuthFailed("OAuth session not available")
        # Refresh the token if it has expired before reading it.
        await self.session.async_ensure_token_valid()
        token = self.session.token
        access_token = token.get(CONF_ACCESS_TOKEN)
        if not access_token:
            raise ConfigEntryAuthFailed("Access token missing from OAuth session")
        if self._client is None:
            self._client = tibber_data_api.TibberDataAPI(
                access_token,
                websession=async_get_clientsession(hass),
            )
        # Always push the (possibly refreshed) token onto the cached client.
        self._client.set_access_token(access_token)
        return self._client
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Tibber component."""
@@ -75,19 +37,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
if AUTH_IMPLEMENTATION not in entry.data:
entry.async_start_reauth(hass)
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="data_api_reauth_required",
)
tibber_connection = tibber.Tibber(
access_token=entry.data[CONF_ACCESS_TOKEN],
websession=async_get_clientsession(hass),
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
hass.data[DOMAIN] = tibber_connection
async def _close(event: Event) -> None:
await tibber_connection.rt_disconnect()
@@ -96,6 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try:
await tibber_connection.update_info()
except (
TimeoutError,
aiohttp.ClientError,
@@ -108,32 +65,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
except tibber.FatalHttpExceptionError:
return False
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="oauth2_implementation_unavailable",
) from err
session = OAuth2Session(hass, entry, implementation)
try:
await session.async_ensure_token_valid()
except ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(
"OAuth session is not valid, reauthentication required"
) from err
raise ConfigEntryNotReady from err
except ClientError as err:
raise ConfigEntryNotReady from err
hass.data[DOMAIN] = TibberRuntimeData(
tibber_connection=tibber_connection,
session=session,
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -143,6 +76,6 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
config_entry, PLATFORMS
)
if unload_ok:
if runtime := hass.data.pop(DOMAIN, None):
await runtime.tibber_connection.rt_disconnect()
tibber_connection = hass.data[DOMAIN]
await tibber_connection.rt_disconnect()
return unload_ok

View File

@@ -1,15 +0,0 @@
"""Application credentials platform for Tibber."""
from homeassistant.components.application_credentials import AuthorizationServer
from homeassistant.core import HomeAssistant
AUTHORIZE_URL = "https://thewall.tibber.com/connect/authorize"
TOKEN_URL = "https://thewall.tibber.com/connect/token"
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
    """Provide the OAuth2 authorization server for the Tibber Data API."""
    server = AuthorizationServer(authorize_url=AUTHORIZE_URL, token_url=TOKEN_URL)
    return server

View File

@@ -2,21 +2,17 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
import aiohttp
import tibber
from tibber import data_api as tibber_data_api
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
from .const import AUTH_IMPLEMENTATION, DATA_API_DEFAULT_SCOPES, DOMAIN
from .const import DOMAIN
DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
ERR_TIMEOUT = "timeout"
@@ -24,145 +20,62 @@ ERR_CLIENT = "cannot_connect"
ERR_TOKEN = "invalid_access_token"
TOKEN_URL = "https://developer.tibber.com/settings/access-token"
_LOGGER = logging.getLogger(__name__)
class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
class TibberConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Tibber integration."""
VERSION = 1
DOMAIN = DOMAIN
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self._access_token: str | None = None
self._title = ""
@property
def logger(self) -> logging.Logger:
"""Return the logger."""
return _LOGGER
@property
def extra_authorize_data(self) -> dict[str, Any]:
"""Extra data appended to the authorize URL."""
return {
**super().extra_authorize_data,
"scope": " ".join(DATA_API_DEFAULT_SCOPES),
}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if user_input is None:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_ACCESS_TOKEN: self._access_token or ""}
self._async_abort_entries_match()
if user_input is not None:
access_token = user_input[CONF_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=access_token,
websession=async_get_clientsession(self.hass),
)
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors={},
)
errors = {}
self._access_token = user_input[CONF_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=self._access_token,
websession=async_get_clientsession(self.hass),
)
self._title = tibber_connection.name or "Tibber"
try:
await tibber_connection.update_info()
except TimeoutError:
errors[CONF_ACCESS_TOKEN] = ERR_TIMEOUT
except tibber.InvalidLoginError:
errors[CONF_ACCESS_TOKEN] = ERR_TOKEN
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
tibber.FatalHttpExceptionError,
):
errors[CONF_ACCESS_TOKEN] = ERR_CLIENT
errors: dict[str, str] = {}
try:
await tibber_connection.update_info()
except TimeoutError:
errors[CONF_ACCESS_TOKEN] = ERR_TIMEOUT
except tibber.InvalidLoginError:
errors[CONF_ACCESS_TOKEN] = ERR_TOKEN
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
tibber.FatalHttpExceptionError,
):
errors[CONF_ACCESS_TOKEN] = ERR_CLIENT
if errors:
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
description_placeholders={"url": TOKEN_URL},
errors=errors,
)
if errors:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_ACCESS_TOKEN: self._access_token or ""}
)
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors=errors,
)
await self.async_set_unique_id(tibber_connection.user_id)
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
self._abort_if_unique_id_mismatch(
reason="wrong_account",
description_placeholders={"title": reauth_entry.title},
)
else:
unique_id = tibber_connection.user_id
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
self._async_abort_entries_match({AUTH_IMPLEMENTATION: DOMAIN})
return await self.async_step_pick_implementation()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle a reauth flow."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_ACCESS_TOKEN)
self._title = reauth_entry.title
if reauth_entry.unique_id is not None:
await self.async_set_unique_id(reauth_entry.unique_id)
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication by reusing the user step."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_ACCESS_TOKEN)
self._title = reauth_entry.title
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
return self.async_create_entry(
title=tibber_connection.name,
data={CONF_ACCESS_TOKEN: access_token},
)
return await self.async_step_user()
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Finalize the OAuth flow and create the config entry."""
if self._access_token is None:
return self.async_abort(reason="missing_configuration")
data[CONF_ACCESS_TOKEN] = self._access_token
access_token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
data_api_client = tibber_data_api.TibberDataAPI(
access_token,
websession=async_get_clientsession(self.hass),
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
description_placeholders={"url": TOKEN_URL},
errors={},
)
try:
await data_api_client.get_userinfo()
except (aiohttp.ClientError, TimeoutError):
return self.async_abort(reason="cannot_connect")
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
return self.async_update_reload_and_abort(
reauth_entry,
data=data,
title=self._title,
)
return self.async_create_entry(title=self._title, data=data)

View File

@@ -1,19 +1,5 @@
"""Constants for Tibber integration."""
AUTH_IMPLEMENTATION = "auth_implementation"
DATA_HASS_CONFIG = "tibber_hass_config"
DOMAIN = "tibber"
MANUFACTURER = "Tibber"
DATA_API_DEFAULT_SCOPES = [
"openid",
"profile",
"email",
"offline_access",
"data-api-user-read",
"data-api-chargers-read",
"data-api-energy-systems-read",
"data-api-homes-read",
"data-api-thermostats-read",
"data-api-vehicles-read",
"data-api-inverters-read",
]

View File

@@ -4,14 +4,9 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import TYPE_CHECKING, cast
from typing import cast
from aiohttp.client_exceptions import ClientError
import tibber
from tibber.data_api import TibberDataAPI, TibberDevice
if TYPE_CHECKING:
from . import TibberRuntimeData
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
@@ -27,7 +22,6 @@ from homeassistant.components.recorder.statistics import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter
@@ -193,45 +187,3 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
unit_of_measurement=unit,
)
async_add_external_statistics(self.hass, metadata, statistics)
class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
    """Fetch and cache Tibber Data API device capabilities.

    Polls the Data API once per minute and exposes the result as a mapping
    of device id to TibberDevice.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        entry: ConfigEntry,
        runtime_data: TibberRuntimeData,
    ) -> None:
        """Initialize the coordinator with a one-minute polling interval."""
        super().__init__(
            hass,
            _LOGGER,
            name=f"{DOMAIN} Data API",
            update_interval=timedelta(minutes=1),
            config_entry=entry,
        )
        self._runtime_data = runtime_data

    async def _async_get_client(self) -> TibberDataAPI:
        """Get the Tibber Data API client with error handling.

        Re-raises ConfigEntryAuthFailed untouched so the auth flow can react;
        wraps transport-level failures in UpdateFailed for the coordinator.
        """
        try:
            return await self._runtime_data.async_get_client(self.hass)
        except ConfigEntryAuthFailed:
            # Let auth errors propagate so Home Assistant starts reauth.
            raise
        except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
            raise UpdateFailed(
                f"Unable to create Tibber Data API client: {err}"
            ) from err

    async def _async_setup(self) -> None:
        """Initial load of Tibber Data API devices."""
        client = await self._async_get_client()
        self.data = await client.get_all_devices()

    async def _async_update_data(self) -> dict[str, TibberDevice]:
        """Fetch the latest device capabilities from the Tibber Data API."""
        client = await self._async_get_client()
        devices: dict[str, TibberDevice] = await client.update_devices()
        return devices

View File

@@ -2,30 +2,23 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any, cast
from typing import Any
import aiohttp
import tibber
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from .const import DOMAIN
if TYPE_CHECKING:
from . import TibberRuntimeData
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
tibber_connection: tibber.Tibber = hass.data[DOMAIN]
runtime = cast("TibberRuntimeData | None", hass.data.get(DOMAIN))
if runtime is None:
return {"homes": []}
result: dict[str, Any] = {
return {
"homes": [
{
"last_data_timestamp": home.last_data_timestamp,
@@ -34,38 +27,6 @@ async def async_get_config_entry_diagnostics(
"last_cons_data_timestamp": home.last_cons_data_timestamp,
"country": home.country,
}
for home in runtime.tibber_connection.get_homes(only_active=False)
for home in tibber_connection.get_homes(only_active=False)
]
}
if runtime.session:
devices: dict[str, Any] = {}
error: str | None = None
try:
client = await runtime.async_get_client(hass)
devices = await client.get_all_devices()
except ConfigEntryAuthFailed:
error = "Authentication failed"
except TimeoutError:
error = "Timeout error"
except aiohttp.ClientError:
error = "Client error"
except tibber.InvalidLoginError:
error = "Invalid login"
except tibber.RetryableHttpExceptionError as err:
error = f"Retryable HTTP error ({err.status})"
except tibber.FatalHttpExceptionError as err:
error = f"Fatal HTTP error ({err.status})"
result["error"] = error
result["devices"] = [
{
"id": device.id,
"name": device.name,
"brand": device.brand,
"model": device.model,
}
for device in devices.values()
]
return result

View File

@@ -3,9 +3,9 @@
"name": "Tibber",
"codeowners": ["@danielhiversen"],
"config_flow": true,
"dependencies": ["application_credentials", "recorder"],
"dependencies": ["recorder"],
"documentation": "https://www.home-assistant.io/integrations/tibber",
"iot_class": "cloud_polling",
"loggers": ["tibber"],
"requirements": ["pyTibber==0.33.1"]
"requirements": ["pyTibber==0.32.2"]
}

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
from typing import TYPE_CHECKING, cast
from tibber import Tibber
from homeassistant.components.notify import (
@@ -16,10 +14,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
if TYPE_CHECKING:
from . import TibberRuntimeData
from . import DOMAIN
async def async_setup_entry(
@@ -44,10 +39,7 @@ class TibberNotificationEntity(NotifyEntity):
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a message to Tibber devices."""
runtime = cast("TibberRuntimeData | None", self.hass.data.get(DOMAIN))
if runtime is None:
raise HomeAssistantError("Tibber integration is not initialized")
tibber_connection: Tibber = runtime.tibber_connection
tibber_connection: Tibber = self.hass.data[DOMAIN]
try:
await tibber_connection.send_notification(
title or ATTR_TITLE_DEFAULT, message

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
import datetime
from datetime import timedelta
@@ -11,8 +10,7 @@ from random import randrange
from typing import Any
import aiohttp
from tibber import FatalHttpExceptionError, RetryableHttpExceptionError, TibberHome
from tibber.data_api import TibberDevice
import tibber
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -29,7 +27,6 @@ from homeassistant.const import (
UnitOfElectricCurrent,
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfLength,
UnitOfPower,
)
from homeassistant.core import Event, HomeAssistant, callback
@@ -45,7 +42,7 @@ from homeassistant.helpers.update_coordinator import (
from homeassistant.util import Throttle, dt as dt_util
from .const import DOMAIN, MANUFACTURER
from .coordinator import TibberDataAPICoordinator, TibberDataCoordinator
from .coordinator import TibberDataCoordinator
_LOGGER = logging.getLogger(__name__)
@@ -263,58 +260,6 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
)
DATA_API_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="storage.stateOfCharge",
translation_key="storage_state_of_charge",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="storage.targetStateOfCharge",
translation_key="storage_target_state_of_charge",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="connector.status",
translation_key="connector_status",
device_class=SensorDeviceClass.ENUM,
options=["connected", "disconnected", "unknown"],
),
SensorEntityDescription(
key="charging.status",
translation_key="charging_status",
device_class=SensorDeviceClass.ENUM,
options=["charging", "idle", "unknown"],
),
SensorEntityDescription(
key="range.remaining",
translation_key="range_remaining",
device_class=SensorDeviceClass.DISTANCE,
native_unit_of_measurement=UnitOfLength.KILOMETERS,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
),
SensorEntityDescription(
key="charging.current.max",
translation_key="charging_current_max",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="charging.current.offlineFallback",
translation_key="charging_current_offline_fallback",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
@@ -322,23 +267,7 @@ async def async_setup_entry(
) -> None:
"""Set up the Tibber sensor."""
await asyncio.gather(
_async_setup_data_api_sensors(hass, entry, async_add_entities),
_async_setup_graphql_sensors(hass, entry, async_add_entities),
)
async def _async_setup_graphql_sensors(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Tibber sensor."""
runtime = hass.data.get(DOMAIN)
if runtime is None:
raise PlatformNotReady("Tibber runtime is not ready")
tibber_connection = runtime.tibber_connection
tibber_connection = hass.data[DOMAIN]
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
@@ -351,11 +280,7 @@ async def _async_setup_graphql_sensors(
except TimeoutError as err:
_LOGGER.error("Timeout connecting to Tibber home: %s ", err)
raise PlatformNotReady from err
except (
RetryableHttpExceptionError,
FatalHttpExceptionError,
aiohttp.ClientError,
) as err:
except (tibber.RetryableHttpExceptionError, aiohttp.ClientError) as err:
_LOGGER.error("Error connecting to Tibber home: %s ", err)
raise PlatformNotReady from err
@@ -403,93 +328,14 @@ async def _async_setup_graphql_sensors(
async_add_entities(entities, True)
async def _async_setup_data_api_sensors(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensors backed by the Tibber Data API."""
runtime = hass.data.get(DOMAIN)
if runtime is None:
raise PlatformNotReady("Tibber runtime is not ready")
coordinator = TibberDataAPICoordinator(hass, entry, runtime)
await coordinator.async_config_entry_first_refresh()
entities: list[TibberDataAPISensor] = []
api_sensors = {sensor.key: sensor for sensor in DATA_API_SENSORS}
for device in coordinator.data.values():
for sensor in device.sensors:
description: SensorEntityDescription | None = api_sensors.get(sensor.id)
if description is None:
_LOGGER.debug(
"Sensor %s not found in DATA_API_SENSORS, skipping", sensor
)
continue
entities.append(
TibberDataAPISensor(
coordinator, device, description, sensor.description
)
)
async_add_entities(entities)
class TibberDataAPISensor(CoordinatorEntity[TibberDataAPICoordinator], SensorEntity):
    """Representation of a Tibber Data API capability sensor.

    One instance per (device, capability key); reads its value from the
    coordinator's device map on every state read.
    """

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: TibberDataAPICoordinator,
        device: TibberDevice,
        entity_description: SensorEntityDescription,
        name: str,
    ) -> None:
        """Initialize the sensor and its device registry entry."""
        super().__init__(coordinator)
        # Key into coordinator.data used to look the device back up later.
        self._device_id: str = device.id
        self.entity_description = entity_description
        self._attr_name = name
        self._attr_unique_id = f"{device.external_id}_{self.entity_description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device.external_id)},
            name=device.name,
            manufacturer=device.brand,
            model=device.model,
        )

    @property
    def native_value(
        self,
    ) -> StateType:
        """Return the value reported by the device.

        Returns None when the device or the matching capability is absent
        from the latest coordinator data.
        """
        device = self.coordinator.data.get(self._device_id)
        if device is None:
            return None
        # Linear scan over the device's sensors for the matching capability.
        for sensor in device.sensors:
            if sensor.id == self.entity_description.key:
                return sensor.value
        return None

    @property
    def available(self) -> bool:
        """Return whether the sensor is available.

        Unavailable when the coordinator is failing or the capability no
        longer reports a value.
        """
        return super().available and self.native_value is not None
class TibberSensor(SensorEntity):
"""Representation of a generic Tibber sensor."""
_attr_has_entity_name = True
def __init__(self, *args: Any, tibber_home: TibberHome, **kwargs: Any) -> None:
def __init__(
self, *args: Any, tibber_home: tibber.TibberHome, **kwargs: Any
) -> None:
"""Initialize the sensor."""
super().__init__(*args, **kwargs)
self._tibber_home = tibber_home
@@ -520,7 +366,7 @@ class TibberSensorElPrice(TibberSensor):
_attr_state_class = SensorStateClass.MEASUREMENT
_attr_translation_key = "electricity_price"
def __init__(self, tibber_home: TibberHome) -> None:
def __init__(self, tibber_home: tibber.TibberHome) -> None:
"""Initialize the sensor."""
super().__init__(tibber_home=tibber_home)
self._last_updated: datetime.datetime | None = None
@@ -597,7 +443,7 @@ class TibberDataSensor(TibberSensor, CoordinatorEntity[TibberDataCoordinator]):
def __init__(
self,
tibber_home: TibberHome,
tibber_home: tibber.TibberHome,
coordinator: TibberDataCoordinator,
entity_description: SensorEntityDescription,
) -> None:
@@ -624,7 +470,7 @@ class TibberSensorRT(TibberSensor, CoordinatorEntity["TibberRtDataCoordinator"])
def __init__(
self,
tibber_home: TibberHome,
tibber_home: tibber.TibberHome,
description: SensorEntityDescription,
initial_state: float,
coordinator: TibberRtDataCoordinator,
@@ -686,7 +532,7 @@ class TibberRtEntityCreator:
def __init__(
self,
async_add_entities: AddConfigEntryEntitiesCallback,
tibber_home: TibberHome,
tibber_home: tibber.TibberHome,
entity_registry: er.EntityRegistry,
) -> None:
"""Initialize the data handler."""
@@ -772,7 +618,7 @@ class TibberRtDataCoordinator(DataUpdateCoordinator): # pylint: disable=hass-en
hass: HomeAssistant,
config_entry: ConfigEntry,
add_sensor_callback: Callable[[TibberRtDataCoordinator, Any], None],
tibber_home: TibberHome,
tibber_home: tibber.TibberHome,
) -> None:
"""Initialize the data handler."""
self._add_sensor_callback = add_sensor_callback

View File

@@ -33,8 +33,7 @@ SERVICE_SCHEMA: Final = vol.Schema(
async def __get_prices(call: ServiceCall) -> ServiceResponse:
runtime = call.hass.data[DOMAIN]
tibber_connection = runtime.tibber_connection
tibber_connection = call.hass.data[DOMAIN]
start = __get_date(call.data.get(ATTR_START), "start")
end = __get_date(call.data.get(ATTR_END), "end")

View File

@@ -1,11 +1,7 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "The connected account does not match {title}. Sign in with the same Tibber account and try again."
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -13,10 +9,6 @@
"timeout": "[%key:common::config_flow::error::timeout_connect%]"
},
"step": {
"reauth_confirm": {
"description": "Reconnect your Tibber account to refresh access.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"
@@ -48,28 +40,6 @@
"average_power": {
"name": "Average power"
},
"charging_current_max": {
"name": "Maximum charging current"
},
"charging_current_offline_fallback": {
"name": "Offline fallback charging current"
},
"charging_status": {
"name": "Charging status",
"state": {
"charging": "Charging",
"idle": "Idle",
"unknown": "Unknown"
}
},
"connector_status": {
"name": "Connector status",
"state": {
"connected": "Connected",
"disconnected": "Disconnected",
"unknown": "Unknown"
}
},
"current_l1": {
"name": "Current L1"
},
@@ -118,18 +88,9 @@
"power_production": {
"name": "Power production"
},
"range_remaining": {
"name": "Remaining range"
},
"signal_strength": {
"name": "Signal strength"
},
"storage_state_of_charge": {
"name": "Storage state of charge"
},
"storage_target_state_of_charge": {
"name": "Storage target state of charge"
},
"voltage_phase1": {
"name": "Voltage phase1"
},
@@ -142,15 +103,9 @@
}
},
"exceptions": {
"data_api_reauth_required": {
"message": "Reconnect Tibber so Home Assistant can enable the new Tibber Data API features."
},
"invalid_date": {
"message": "Invalid datetime provided {date}"
},
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"send_message_timeout": {
"message": "Timeout sending message with Tibber"
}

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from typing import Any
import voluptuous as vol
from wled import WLED, Device, WLEDConnectionError
from wled import WLED, Device, WLEDConnectionError, WLEDUnsupportedVersionError
from homeassistant.components import onboarding
from homeassistant.config_entries import (
@@ -48,6 +48,8 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN):
if user_input is not None:
try:
device = await self._async_get_device(user_input[CONF_HOST])
except WLEDUnsupportedVersionError:
errors["base"] = "unsupported_version"
except WLEDConnectionError:
errors["base"] = "cannot_connect"
else:
@@ -110,6 +112,8 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN):
self.discovered_host = discovery_info.host
try:
self.discovered_device = await self._async_get_device(discovery_info.host)
except WLEDUnsupportedVersionError:
return self.async_abort(reason="unsupported_version")
except WLEDConnectionError:
return self.async_abort(reason="cannot_connect")

View File

@@ -9,6 +9,7 @@ from wled import (
WLEDConnectionClosedError,
WLEDError,
WLEDReleases,
WLEDUnsupportedVersionError,
)
from homeassistant.config_entries import ConfigEntry
@@ -115,6 +116,14 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]):
"""Fetch data from WLED."""
try:
device = await self.wled.update()
except WLEDUnsupportedVersionError as error:
# Error message from WLED library contains version info
# better to show that to user, but it is not translatable.
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="unsupported_version",
translation_placeholders={"error": str(error)},
) from error
except WLEDError as error:
raise UpdateFailed(
translation_domain=DOMAIN,

View File

@@ -150,12 +150,9 @@ class WLEDSegmentLight(WLEDEntity, LightEntity):
@property
def available(self) -> bool:
"""Return True if entity is available."""
try:
self.coordinator.data.state.segments[self._segment]
except KeyError:
return False
return super().available
return (
super().available and self._segment in self.coordinator.data.state.segments
)
@property
def rgb_color(self) -> tuple[int, int, int] | None:

View File

@@ -97,12 +97,9 @@ class WLEDNumber(WLEDEntity, NumberEntity):
@property
def available(self) -> bool:
"""Return True if entity is available."""
try:
self.coordinator.data.state.segments[self._segment]
except KeyError:
return False
return super().available
return (
super().available and self._segment in self.coordinator.data.state.segments
)
@property
def native_value(self) -> float | None:

View File

@@ -0,0 +1,88 @@
rules:
# Bronze
action-setup:
status: exempt
comment: This integration does not have custom service actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
test_connection_error and test_unsupported_version_error should end in CREATE_ENTRY
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not have custom service actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: todo
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: |
This integration does not require authentication.
test-coverage:
status: todo
comment: |
The test_setting_unique_id test is redundant.
The test_websocket_already_connected test can use the freezer.
The snapshot tests should be used more widely.
We should use pytest.mark.freeze_time instead of mock.
# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: done
docs-known-limitations:
status: todo
comment: |
Analog RGBCCT Strip are poor supported by HA.
See: https://github.com/home-assistant/core/issues/123614
docs-supported-devices: todo
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
This integration has a fixed single device.
entity-category: done
entity-device-class:
status: todo
comment: Led count could receive unit of measurement
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
repair-issues:
status: exempt
comment: This integration does not have any known issues that require repair.
stale-devices:
status: exempt
comment: |
This integration has a fixed single device.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -173,12 +173,9 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity):
@property
def available(self) -> bool:
"""Return True if entity is available."""
try:
self.coordinator.data.state.segments[self._segment]
except KeyError:
return False
return super().available
return (
super().available and self._segment in self.coordinator.data.state.segments
)
@property
def current_option(self) -> str | None:

View File

@@ -1,13 +1,18 @@
{
"common": {
"unsupported_version": "The WLED device's firmware version is not supported."
},
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unique_id_mismatch": "MAC address does not match the configured device. Expected to connect to device with MAC: `{expected_mac}`, but connected to device with MAC: `{actual_mac}`. \n\nPlease ensure you reconfigure against the same device."
"unique_id_mismatch": "MAC address does not match the configured device. Expected to connect to device with MAC: `{expected_mac}`, but connected to device with MAC: `{actual_mac}`. \n\nPlease ensure you reconfigure against the same device.",
"unsupported_version": "[%key:component::wled::common::unsupported_version%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unsupported_version": "[%key:component::wled::common::unsupported_version%]"
},
"flow_title": "{name}",
"step": {
@@ -138,6 +143,9 @@
},
"mac_address_mismatch": {
"message": "MAC address does not match the configured device. Expected to connect to device with MAC: {expected_mac}, but connected to device with MAC: {actual_mac}."
},
"unsupported_version": {
"message": "The WLED device's firmware version is not supported: {error}"
}
},
"options": {

View File

@@ -167,12 +167,9 @@ class WLEDReverseSwitch(WLEDEntity, SwitchEntity):
@property
def available(self) -> bool:
"""Return True if entity is available."""
try:
self.coordinator.data.state.segments[self._segment]
except KeyError:
return False
return super().available
return (
super().available and self._segment in self.coordinator.data.state.segments
)
@property
def is_on(self) -> bool:

View File

@@ -102,7 +102,7 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity):
"""URL to the full release notes of the latest version available."""
if (version := self.latest_version) is None:
return None
return f"https://github.com/Aircoookie/WLED/releases/tag/v{version}"
return f"https://github.com/wled/WLED/releases/tag/v{version}"
@wled_exception_handler
async def async_install(

View File

@@ -37,7 +37,6 @@ APPLICATION_CREDENTIALS = [
"smartthings",
"spotify",
"tesla_fleet",
"tibber",
"twitch",
"volvo",
"weheat",

View File

@@ -3503,6 +3503,11 @@
"zwave"
]
},
"levoit": {
"name": "Levoit",
"integration_type": "virtual",
"supported_by": "vesync"
},
"lg": {
"name": "LG",
"integrations": {
@@ -4506,7 +4511,7 @@
},
"nordpool": {
"name": "Nord Pool",
"integration_type": "hub",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling",
"single_config_entry": true
@@ -4827,7 +4832,7 @@
},
"oralb": {
"name": "Oral-B",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "local_push"
},

View File

@@ -6,8 +6,8 @@ To update, run python3 -m script.hassfest
LABS_PREVIEW_FEATURES = {
"automation": {
"new_triggers_conditions": {
"feedback_url": "",
"learn_more_url": "",
"feedback_url": "https://forms.gle/fWFZqf5MzuwWTsCH8",
"learn_more_url": "https://www.home-assistant.io/blog/2025/12/03/release-202512/#purpose-specific-triggers-and-conditions",
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation",
},
},

View File

@@ -35,9 +35,9 @@ file-read-backwards==2.0.0
fnv-hash-fast==1.6.0
go2rtc-client==0.3.0
ha-ffmpeg==3.2.2
habluetooth==5.7.0
hass-nabucasa==1.6.2
hassil==3.4.0
habluetooth==5.8.0
hass-nabucasa==1.7.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20251202.0
home-assistant-intents==2025.12.2

View File

@@ -48,7 +48,7 @@ dependencies = [
"fnv-hash-fast==1.6.0",
# hass-nabucasa is imported by helpers which don't depend on the cloud
# integration
"hass-nabucasa==1.6.2",
"hass-nabucasa==1.7.0",
# When bumping httpx, please check the version pins of
# httpcore, anyio, and h11 in gen_requirements_all
"httpx==0.28.1",

2
requirements.txt generated
View File

@@ -22,7 +22,7 @@ certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.7
fnv-hash-fast==1.6.0
hass-nabucasa==1.6.2
hass-nabucasa==1.7.0
httpx==0.28.1
home-assistant-bluetooth==1.13.1
ifaddr==0.2.0

20
requirements_all.txt generated
View File

@@ -340,7 +340,7 @@ aiontfy==0.6.1
aionut==4.3.4
# homeassistant.components.onkyo
aioonkyo==0.3.0
aioonkyo==0.4.0
# homeassistant.components.openexchangerates
aioopenexchangerates==0.6.8
@@ -1090,7 +1090,7 @@ google-genai==1.38.0
google-maps-routing==0.6.15
# homeassistant.components.nest
google-nest-sdm==9.1.1
google-nest-sdm==9.1.2
# homeassistant.components.google_photos
google-photos-library-api==0.12.1
@@ -1157,20 +1157,20 @@ ha-silabs-firmware-client==0.3.0
habiticalib==0.4.6
# homeassistant.components.bluetooth
habluetooth==5.7.0
habluetooth==5.8.0
# homeassistant.components.hanna
hanna-cloud==0.0.6
# homeassistant.components.cloud
hass-nabucasa==1.6.2
hass-nabucasa==1.7.0
# homeassistant.components.splunk
hass-splunk==0.1.1
# homeassistant.components.assist_satellite
# homeassistant.components.conversation
hassil==3.4.0
hassil==3.5.0
# homeassistant.components.jewish_calendar
hdate[astral]==1.1.2
@@ -1288,7 +1288,7 @@ insteon-frontend-home-assistant==0.5.0
intellifire4py==4.2.1
# homeassistant.components.iometer
iometer==0.2.0
iometer==0.3.0
# homeassistant.components.iotty
iottycloud==0.3.0
@@ -1370,7 +1370,7 @@ led-ble==1.1.7
lektricowifi==0.1
# homeassistant.components.letpot
letpot==0.6.3
letpot==0.6.4
# homeassistant.components.foscam
libpyfoscamcgi==0.0.9
@@ -1855,7 +1855,7 @@ pyRFXtrx==0.31.1
pySDCP==1
# homeassistant.components.tibber
pyTibber==0.33.1
pyTibber==0.32.2
# homeassistant.components.dlink
pyW215==0.8.0
@@ -2563,7 +2563,7 @@ python-rabbitair==0.0.8
python-ripple-api==0.0.3
# homeassistant.components.roborock
python-roborock==3.8.4
python-roborock==3.9.2
# homeassistant.components.smarttub
python-smarttub==0.0.45
@@ -3191,7 +3191,7 @@ wyoming==1.7.2
xiaomi-ble==1.2.0
# homeassistant.components.knx
xknx==3.11.0
xknx==3.12.0
# homeassistant.components.knx
xknxproject==3.8.2

View File

@@ -325,7 +325,7 @@ aiontfy==0.6.1
aionut==4.3.4
# homeassistant.components.onkyo
aioonkyo==0.3.0
aioonkyo==0.4.0
# homeassistant.components.openexchangerates
aioopenexchangerates==0.6.8
@@ -966,7 +966,7 @@ google-genai==1.38.0
google-maps-routing==0.6.15
# homeassistant.components.nest
google-nest-sdm==9.1.1
google-nest-sdm==9.1.2
# homeassistant.components.google_photos
google-photos-library-api==0.12.1
@@ -1027,17 +1027,17 @@ ha-silabs-firmware-client==0.3.0
habiticalib==0.4.6
# homeassistant.components.bluetooth
habluetooth==5.7.0
habluetooth==5.8.0
# homeassistant.components.hanna
hanna-cloud==0.0.6
# homeassistant.components.cloud
hass-nabucasa==1.6.2
hass-nabucasa==1.7.0
# homeassistant.components.assist_satellite
# homeassistant.components.conversation
hassil==3.4.0
hassil==3.5.0
# homeassistant.components.jewish_calendar
hdate[astral]==1.1.2
@@ -1134,7 +1134,7 @@ insteon-frontend-home-assistant==0.5.0
intellifire4py==4.2.1
# homeassistant.components.iometer
iometer==0.2.0
iometer==0.3.0
# homeassistant.components.iotty
iottycloud==0.3.0
@@ -1201,7 +1201,7 @@ led-ble==1.1.7
lektricowifi==0.1
# homeassistant.components.letpot
letpot==0.6.3
letpot==0.6.4
# homeassistant.components.foscam
libpyfoscamcgi==0.0.9
@@ -1580,7 +1580,7 @@ pyHomee==1.3.8
pyRFXtrx==0.31.1
# homeassistant.components.tibber
pyTibber==0.33.1
pyTibber==0.32.2
# homeassistant.components.dlink
pyW215==0.8.0
@@ -2144,7 +2144,7 @@ python-pooldose==0.8.1
python-rabbitair==0.0.8
# homeassistant.components.roborock
python-roborock==3.8.4
python-roborock==3.9.2
# homeassistant.components.smarttub
python-smarttub==0.0.45
@@ -2658,7 +2658,7 @@ wyoming==1.7.2
xiaomi-ble==1.2.0
# homeassistant.components.knx
xknx==3.11.0
xknx==3.12.0
# homeassistant.components.knx
xknxproject==3.8.2

View File

@@ -30,7 +30,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.9.6,source=/uv,target=/bin/uv \
ruff==0.13.0 \
PyTurboJPEG==1.8.0 \
ha-ffmpeg==3.2.2 \
hassil==3.4.0 \
hassil==3.5.0 \
home-assistant-intents==2025.12.2 \
mutagen==1.47.0 \
pymicro-vad==1.0.1 \

View File

@@ -444,7 +444,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
"greeneye_monitor",
"greenwave",
"group",
"growatt_server",
"gtfs",
"guardian",
"harman_kardon_avr",
@@ -1061,7 +1060,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
"wirelesstag",
"withings",
"wiz",
"wled",
"wmspro",
"wolflink",
"workday",
@@ -1454,8 +1452,8 @@ INTEGRATIONS_WITHOUT_SCALE = [
"greeneye_monitor",
"greenwave",
"group",
"growatt_server",
"gtfs",
"growatt_server",
"guardian",
"harman_kardon_avr",
"harmony",

View File

@@ -205,8 +205,8 @@ async def test_ws_get_client_config(
"iceServers": [
{
"urls": [
"stun:stun.home-assistant.io:80",
"stun:stun.home-assistant.io:3478",
"stun:stun.home-assistant.io:80",
]
},
],
@@ -238,8 +238,8 @@ async def test_ws_get_client_config(
"iceServers": [
{
"urls": [
"stun:stun.home-assistant.io:80",
"stun:stun.home-assistant.io:3478",
"stun:stun.home-assistant.io:80",
]
},
{

View File

@@ -11,6 +11,7 @@ import pytest
import voluptuous as vol
from homeassistant.components import conversation
from homeassistant.components.cloud.const import AI_TASK_ENTITY_UNIQUE_ID, DOMAIN
from homeassistant.components.cloud.entity import (
BaseCloudLLMEntity,
_convert_content_to_param,
@@ -18,7 +19,8 @@ from homeassistant.components.cloud.entity import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import llm, selector
from homeassistant.helpers import entity_registry as er, llm, selector
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
@@ -219,3 +221,66 @@ async def test_prepare_chat_for_generation_passes_messages_through(
assert response["messages"] == messages
assert response["conversation_id"] == "conversation-id"
async def test_async_handle_chat_log_service_sets_structured_output_non_strict(
hass: HomeAssistant,
cloud: MagicMock,
entity_registry: er.EntityRegistry,
mock_cloud_login: None,
) -> None:
"""Ensure structured output requests always disable strict validation via service."""
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
on_start_callback = cloud.register_on_start.call_args[0][0]
await on_start_callback()
await hass.async_block_till_done()
entity_id = entity_registry.async_get_entity_id(
"ai_task", DOMAIN, AI_TASK_ENTITY_UNIQUE_ID
)
assert entity_id is not None
async def _empty_stream():
return
async def _fake_delta_stream(
self: conversation.ChatLog,
agent_id: str,
stream,
):
content = conversation.AssistantContent(
agent_id=agent_id, content='{"value": "ok"}'
)
self.async_add_assistant_content_without_tools(content)
yield content
cloud.llm.async_generate_data = AsyncMock(return_value=_empty_stream())
with patch(
"homeassistant.components.conversation.chat_log.ChatLog.async_add_delta_content_stream",
_fake_delta_stream,
):
await hass.services.async_call(
"ai_task",
"generate_data",
{
"entity_id": entity_id,
"task_name": "Device Report",
"instructions": "Provide value.",
"structure": {
"value": {
"selector": {"text": None},
"required": True,
}
},
},
blocking=True,
return_response=True,
)
cloud.llm.async_generate_data.assert_awaited_once()
_, kwargs = cloud.llm.async_generate_data.call_args
assert kwargs["response_format"]["json_schema"]["strict"] is False

View File

@@ -22,6 +22,7 @@ from hass_nabucasa.payments_api import PaymentsApiError
from hass_nabucasa.remote import CertificateStatus
import pytest
from syrupy.assertion import SnapshotAssertion
from webrtc_models import RTCIceServer
from homeassistant.components import system_health
from homeassistant.components.alexa import errors as alexa_errors
@@ -2186,3 +2187,39 @@ async def test_download_support_package_integration_load_error(
req = await cloud_client.get("/api/cloud/support_package")
assert req.status == HTTPStatus.OK
assert await req.text() == snapshot
async def test_websocket_ice_servers(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
cloud: MagicMock,
setup_cloud: None,
) -> None:
"""Test getting ICE servers."""
cloud.client._ice_servers = [
RTCIceServer(urls="stun:stun.l.bla.com:19302"),
RTCIceServer(
urls="turn:turn.example.com:3478", username="user", credential="pass"
),
]
client = await hass_ws_client(hass)
await client.send_json_auto_id({"type": "cloud/webrtc/ice_servers"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == [
{"urls": "stun:stun.l.bla.com:19302"},
{
"urls": "turn:turn.example.com:3478",
"username": "user",
"credential": "pass",
},
]
cloud.id_token = None
await client.send_json_auto_id({"type": "cloud/webrtc/ice_servers"})
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == "not_logged_in"

View File

@@ -73,7 +73,7 @@
}),
'original_device_class': <BinarySensorDeviceClass.POWER: 'power'>,
'original_icon': None,
'original_name': 'Deep Sleep',
'original_name': 'Deep sleep',
'platform': 'fressnapf_tracker',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -87,7 +87,7 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'power',
'friendly_name': 'Fluffy Deep Sleep',
'friendly_name': 'Fluffy Deep sleep',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.fluffy_deep_sleep',

View File

@@ -16,6 +16,7 @@ from homeassistant.components.notify import (
DOMAIN as NOTIFY_DOMAIN,
SERVICE_SEND_MESSAGE,
NotifyEntity,
NotifyEntityFeature,
)
from homeassistant.config_entries import ConfigEntry, ConfigFlow
from homeassistant.const import (
@@ -298,9 +299,11 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]:
class MockNotifyEntity(MockEntity, NotifyEntity):
"""Mock Email notifier entity to use in tests."""
def __init__(self, **values: Any) -> None:
def __init__(self, *, is_title_supported: bool, **values: Any) -> None:
"""Initialize the mock entity."""
super().__init__(**values)
if is_title_supported:
self._attr_supported_features = NotifyEntityFeature.TITLE
self.send_message_mock_calls = MagicMock()
async def async_send_message(self, message: str, title: str | None = None) -> None:
@@ -330,11 +333,21 @@ async def help_async_unload_entry(
@pytest.fixture
async def mock_notifiers(
hass: HomeAssistant, config_flow_fixture: None
) -> list[NotifyEntity]:
) -> list[MockNotifyEntity]:
"""Set up the notify entities."""
entity = MockNotifyEntity(name="test", entity_id="notify.test")
entity2 = MockNotifyEntity(name="test2", entity_id="notify.test2")
entities = [entity, entity2]
entity_title_1 = MockNotifyEntity(
is_title_supported=True, name="has_title_1", entity_id="notify.has_title_1"
)
entity_title_2 = MockNotifyEntity(
is_title_supported=True, name="has_title_2", entity_id="notify.has_title_2"
)
entity_no_title_1 = MockNotifyEntity(
is_title_supported=False, name="no_title_1", entity_id="notify.no_title_1"
)
entity_no_title_2 = MockNotifyEntity(
is_title_supported=False, name="no_title_2", entity_id="notify.no_title_2"
)
entities = [entity_title_1, entity_title_2, entity_no_title_1, entity_no_title_2]
test_entry = MockConfigEntry(domain="test")
test_entry.add_to_hass(hass)
mock_integration(
@@ -352,19 +365,23 @@ async def mock_notifiers(
async def test_notify_entity_group(
hass: HomeAssistant, mock_notifiers: list[NotifyEntity]
hass: HomeAssistant, mock_notifiers: list[MockNotifyEntity]
) -> None:
"""Test sending a message to a notify group."""
entity, entity2 = mock_notifiers
assert entity.send_message_mock_calls.call_count == 0
assert entity2.send_message_mock_calls.call_count == 0
entity_title_1, entity_title_2, entity_no_title_1, entity_no_title_2 = (
mock_notifiers
)
for mock_notifier in mock_notifiers:
assert mock_notifier.send_message_mock_calls.call_count == 0
# test group containing 1 member with title supported
config_entry = MockConfigEntry(
domain=DOMAIN,
options={
"group_type": "notify",
"name": "Test Group",
"entities": ["notify.test", "notify.test2"],
"entities": ["notify.has_title_1"],
"hide_members": True,
},
title="Test Group",
@@ -384,14 +401,144 @@ async def test_notify_entity_group(
blocking=True,
)
assert entity.send_message_mock_calls.call_count == 1
assert entity.send_message_mock_calls.call_args == call(
assert entity_title_1.send_message_mock_calls.call_count == 1
assert entity_title_1.send_message_mock_calls.call_args == call(
"Hello", title="Test notification"
)
assert entity2.send_message_mock_calls.call_count == 1
assert entity2.send_message_mock_calls.call_args == call(
for mock_notifier in mock_notifiers:
mock_notifier.send_message_mock_calls.reset_mock()
# test group containing 1 member with title supported but no title provided
await hass.services.async_call(
NOTIFY_DOMAIN,
SERVICE_SEND_MESSAGE,
{
ATTR_MESSAGE: "Hello",
ATTR_ENTITY_ID: "notify.test_group",
},
blocking=True,
)
assert entity_title_1.send_message_mock_calls.call_count == 1
assert entity_title_1.send_message_mock_calls.call_args == call("Hello", title=None)
for mock_notifier in mock_notifiers:
mock_notifier.send_message_mock_calls.reset_mock()
# test group containing 2 members with title supported
config_entry = MockConfigEntry(
domain=DOMAIN,
options={
"group_type": "notify",
"name": "Test Group 2",
"entities": ["notify.has_title_1", "notify.has_title_2"],
"hide_members": True,
},
title="Test Group 2",
)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
NOTIFY_DOMAIN,
SERVICE_SEND_MESSAGE,
{
ATTR_MESSAGE: "Hello",
ATTR_TITLE: "Test notification",
ATTR_ENTITY_ID: "notify.test_group_2",
},
blocking=True,
)
assert entity_title_1.send_message_mock_calls.call_count == 1
assert entity_title_1.send_message_mock_calls.call_args == call(
"Hello", title="Test notification"
)
assert entity_title_2.send_message_mock_calls.call_count == 1
assert entity_title_2.send_message_mock_calls.call_args == call(
"Hello", title="Test notification"
)
for mock_notifier in mock_notifiers:
mock_notifier.send_message_mock_calls.reset_mock()
# test group containing 2 members: 1 title supported and 1 not supported
# title is not supported since not all members support it
config_entry = MockConfigEntry(
domain=DOMAIN,
options={
"group_type": "notify",
"name": "Test Group",
"entities": ["notify.has_title_1", "notify.no_title_1"],
"hide_members": True,
},
title="Test Group 3",
)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
NOTIFY_DOMAIN,
SERVICE_SEND_MESSAGE,
{
ATTR_MESSAGE: "Hello",
ATTR_TITLE: "Test notification",
ATTR_ENTITY_ID: "notify.test_group_3",
},
blocking=True,
)
assert entity_title_1.send_message_mock_calls.call_count == 1
assert entity_title_1.send_message_mock_calls.call_args == call("Hello", title=None)
assert entity_no_title_1.send_message_mock_calls.call_count == 1
assert entity_no_title_1.send_message_mock_calls.call_args == call(
"Hello", title=None
)
for mock_notifier in mock_notifiers:
mock_notifier.send_message_mock_calls.reset_mock()
# test group containing 2 members: both not supporting title
config_entry = MockConfigEntry(
domain=DOMAIN,
options={
"group_type": "notify",
"name": "Test Group",
"entities": ["notify.no_title_1", "notify.no_title_2"],
"hide_members": True,
},
title="Test Group 4",
)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
NOTIFY_DOMAIN,
SERVICE_SEND_MESSAGE,
{
ATTR_MESSAGE: "Hello",
ATTR_TITLE: "Test notification",
ATTR_ENTITY_ID: "notify.test_group_4",
},
blocking=True,
)
assert entity_no_title_1.send_message_mock_calls.call_count == 1
assert entity_no_title_1.send_message_mock_calls.call_args == call(
"Hello", title=None
)
assert entity_no_title_2.send_message_mock_calls.call_count == 1
assert entity_no_title_2.send_message_mock_calls.call_args == call(
"Hello", title=None
)
async def test_state_reporting(hass: HomeAssistant) -> None:

View File

@@ -1041,6 +1041,7 @@ async def test_supervisor_issues_free_space(
fixable=False,
placeholders={
"more_info_free_space": "https://www.home-assistant.io/more-info/free-space",
"storage_url": "/config/storage",
"free_space": "1.6",
},
)
@@ -1090,6 +1091,7 @@ async def test_supervisor_issues_free_space_host_info_fail(
fixable=False,
placeholders={
"more_info_free_space": "https://www.home-assistant.io/more-info/free-space",
"storage_url": "/config/storage",
"free_space": "<2",
},
)

View File

@@ -0,0 +1,43 @@
{
"version": 2,
"minor_version": 2,
"key": "knx/config_store.json",
"data": {
"entities": {
"date": {
"knx_es_01K76NGZRMJA74CBRQF9KXNPE8": {
"entity": {
"name": "date_with_state_address",
"device_info": null,
"entity_category": null
},
"knx": {
"ga_date": {
"write": "0/0/1",
"state": "0/0/2",
"passive": []
},
"respond_to_read": true,
"sync_state": "init"
}
},
"knx_es_01K76NGZRMJA74CBRQF9KXNPE9": {
"entity": {
"name": "date_without_state_address",
"device_info": null,
"entity_category": null
},
"knx": {
"ga_date": {
"write": "0/0/1",
"state": null,
"passive": []
},
"respond_to_read": false,
"sync_state": false
}
}
}
}
}
}

Some files were not shown because too many files have changed in this diff Show More