Mirror of https://github.com/home-assistant/core.git, synced 2025-12-24 08:48:24 +00:00
Compare commits
239 Commits
matter_tes...aioesphome
SHA1:
bdf1e3b2d7, 2e3eb0f9af, 2b5823c264, 95165022db, 7c71c0377f, b07b699e79, 34db548725, 5150efd63f,
0525c75686, 7c14862f62, 19f8d9d41b, af1218876c, 9715a7cc32, b87b72ab01, 0f3f16fabe, 85311e3def,
a33a4b6d9d, 02f412feb1, b3c78d4207, a3dec29c72, aa20a74a76, c0fa6ad2e0, 5107b7012d, bcc5985c8b,
5933c09a1d, 5f1e6f3633, 6bd8d123ed, 50a51b5ecc, c115b418ac, 2160827a50, 82d84d7adf, 3e498d289b,
e6d8092c37, 2e4f95c099, 9f54b09423, 8361d65d23, 7a82aa4803, 02ab11c1bd, 64f0a615df, 3e889616f2,
bdbe2a6346, 016d492342, 9ce46c0937, 8d96aee96e, 7083a0fdb7, e3976923b2, 0b20417895, ed46c30b10,
38f4cf0575, 7b60cc3a80, fe0c92b6c5, c4386b4360, d4d26bccc1, 550b7bf7ba, 6ff472ff87, ca30d8b1c2,
aae98a77d5, 30b7b24ddd, a972a6d43a, 6e06c015df, 01c3e88e0f, fd9064376a, 9eb5d452cf, 966209e4b6,
a09ac94db9, 0710cf3e6b, a81f2a63c0, 6ef2d0d0a3, 911ea67a6d, 28dc32d5dc, c95416cb48, 7dc9084f06,
39ba36d642, 5009560f57, 41e88573bb, 27ee986b1b, c9d21c1851, 2afbdc5757, 14cb8af9fe, 74ae0f8297,
3050a5c896, 9f886e66c7, 3c752d4516, e4bfdf5b30, 3e43424a73, 0db9dcfd1c, 5b5850224a, 065b0eb5b2,
6a1d86d5db, f99a73ef28, 0436d30062, 24b6b5452b, 8b91ebfe30, 37d3b73c1b, c881d9809e, 85dfe3a107,
d8a468833e, 5bbd56b8e6, d0411b6613, 293fbebef2, cfe542acb9, 8da323d4b7, b2edf637cc, de61a45de1,
d9324cb0e4, 4a464f601c, 43e9c24c18, 1c3492b4c2, e0cb56a38c, 6e05cc4898, 6f9dc2e5a2, ddb1ae371d,
6553337b79, aedc729d57, 31fa69b609, b819a866b9, 6cc7d83def, 5154418051, 7e63c12b95, d17e951591,
9198e5f56d, 97d7e0e01e, 4d5b8c4b08, abb011311e, 92cf7623fa, aedf4c881b, 74baf44c83, 9afb4a9eb8,
e1967bef9a, f17b6aa9e4, dd6d7397d9, aeabd2d2cc, d7af2f39c2, a674ad11bc, ccb64d7fd8, 36691e2a3d,
8971f75f13, 173db170af, 881851a4f6, 4b4b64e939, e721c1a092, 0933c9fe51, 632b3e5dc3, 434cb48344,
86d4c3cbbf, 3019f9041c, 9e0a3dee08, fefe7d9e5d, 4c382cedff, 6ffd05313b, 9be0214021, 54300430b7,
a25038259e, 81be14c8f1, 62464b83dc, beb909528c, ef28715360, 78cc41fdc0, 6a868ca5cc, f43dead38c,
86163252e1, 0cd5202596, 33dcde7de1, c449b2e2e8, f40f7072c8, 4163ecd833, 9c59d528af, c2440c4ebd,
cb275f65ba, b1923df3ca, 7ddfd155ca, e01df6d10d, 54010728d5, 62a3b3827f, b9abfba20f, eca9f36e55,
3c865c6f41, 3b32c4bcbf, fcdc1cfed9, 0fd782c4ab, bbcaf69973, f2b713acac, 6c944d6b15, 4dd3abb16a,
d2672b9ddf, ff30492919, b5ccdf8165, b3c745cfa7, 67aeafa797, 3d71b6de44, 5349045932, 4960871c84,
af3861cd6b, f9a070e9b3, fd503b2e33, e5a73fcf57, 6991e01489, c8636ee6f3, 52229dc5a8, f013455843,
cae5bca546, 49299b06c6, 8e39027ad5, 2a1ce2df61, 7a6d929150, 6f4a112dbb, 2197b910fb, 7e2a9cd7f9,
e7ed7a8ed2, 9ba2d0defe, 231300919c, 664c50586f, 43b9ecfc2b, f1237ed52a, ecf8f55cc4, ff36693057,
005785997c, 9917b82b66, 9c927406ac, 972d95602a, 5e0549a18f, bcbb159fb2, 0123ca656a, 1f699c729c,
50c3fcfeba, 2af1e098cc, c418d9750b, e96d614076, f0a5e0a023, 6ac6b86060, 3909171b1a, 769029505f,
080ec3524b, 48d671ad5f, 7115db5d22, d0c8792e4b, 84d7c37502, 8a10638470, 10dd53ffc2
3 .github/copilot-instructions.md vendored
@@ -51,6 +51,9 @@ rules:
- **Missing imports** - We use static analysis tooling to catch that
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)

**Git commit practices during review:**
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review

## Python Requirements

- **Compatibility**: Python 3.13+
6 .github/workflows/builder.yml vendored
@@ -197,7 +197,7 @@ jobs:
cosign-release: "v2.5.3"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

- name: Build variables
id: vars
@@ -405,7 +405,7 @@ jobs:
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.7.1

- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
@@ -551,7 +551,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
4 .github/workflows/codeql.yml vendored
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: "/language:python"
1 .gitignore vendored
@@ -92,6 +92,7 @@ pip-selfcheck.json
venv
.venv
Pipfile*
uv.lock
share/*
/Scripts/
@@ -567,6 +567,7 @@ homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
homeassistant.components.waqi.*
homeassistant.components.water_heater.*
homeassistant.components.watts.*
homeassistant.components.watttime.*
homeassistant.components.weather.*
homeassistant.components.webhook.*
18 CODEOWNERS generated
@@ -530,6 +530,8 @@ build.json @home-assistant/supervisor
/tests/components/flo/ @dmulcahey
/homeassistant/components/flume/ @ChrisMandich @bdraco @jeeftor
/tests/components/flume/ @ChrisMandich @bdraco @jeeftor
/homeassistant/components/fluss/ @fluss
/tests/components/fluss/ @fluss
/homeassistant/components/flux_led/ @icemanch
/tests/components/flux_led/ @icemanch
/homeassistant/components/forecast_solar/ @klaasnicolaas @frenck
@@ -664,8 +666,8 @@ build.json @home-assistant/supervisor
/tests/components/heos/ @andrewsayre
/homeassistant/components/here_travel_time/ @eifinger
/tests/components/here_travel_time/ @eifinger
/homeassistant/components/hikvision/ @mezz64
/tests/components/hikvision/ @mezz64
/homeassistant/components/hikvision/ @mezz64 @ptarjan
/tests/components/hikvision/ @mezz64 @ptarjan
/homeassistant/components/hikvisioncam/ @fbradyirl
/homeassistant/components/hisense_aehw4a1/ @bannhead
/tests/components/hisense_aehw4a1/ @bannhead
@@ -794,6 +796,8 @@ build.json @home-assistant/supervisor
/tests/components/intellifire/ @jeeftor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/intent_script/ @arturpragacz
/tests/components/intent_script/ @arturpragacz
/homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @jukrebs
/tests/components/iometer/ @jukrebs
@@ -1195,8 +1199,8 @@ build.json @home-assistant/supervisor
/tests/components/ourgroceries/ @OnFreund
/homeassistant/components/overkiz/ @imicknl
/tests/components/overkiz/ @imicknl
/homeassistant/components/overseerr/ @joostlek
/tests/components/overseerr/ @joostlek
/homeassistant/components/overseerr/ @joostlek @AmGarera
/tests/components/overseerr/ @joostlek @AmGarera
/homeassistant/components/ovo_energy/ @timmo001
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1693,8 +1697,8 @@ build.json @home-assistant/supervisor
/tests/components/trafikverket_train/ @gjohansson-ST
/homeassistant/components/trafikverket_weatherstation/ @gjohansson-ST
/tests/components/trafikverket_weatherstation/ @gjohansson-ST
/homeassistant/components/transmission/ @engrbm87 @JPHutchins
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/tests/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey
@@ -1798,6 +1802,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai
/homeassistant/components/watts/ @theobld-ww @devender-verma-ww @ssi-spyro
/tests/components/watts/ @theobld-ww @devender-verma-ww @ssi-spyro
/homeassistant/components/watttime/ @bachya
/tests/components/watttime/ @bachya
/homeassistant/components/waze_travel_time/ @eifinger
2 Dockerfile generated
@@ -24,7 +24,7 @@ ENV \
COPY rootfs /

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc
COPY --from=ghcr.io/alexxit/go2rtc@sha256:f394f6329f5389a4c9a7fc54b09fdec9621bbb78bf7a672b973440bbdfb02241 /usr/local/bin/go2rtc /bin/go2rtc

RUN \
# Verify go2rtc can be executed
@@ -402,6 +402,8 @@ class AuthManager:
if user.is_owner:
raise ValueError("Unable to deactivate the owner")
await self._store.async_deactivate_user(user)
for refresh_token in list(user.refresh_tokens.values()):
self.async_remove_refresh_token(refresh_token)

async def async_remove_credentials(self, credentials: models.Credentials) -> None:
"""Remove credentials."""
@@ -624,13 +624,16 @@ async def async_enable_logging(

if log_file is None:
default_log_path = hass.config.path(ERROR_LOG_FILENAME)
if "SUPERVISOR" in os.environ:
_LOGGER.info("Running in Supervisor, not logging to file")
if "SUPERVISOR" in os.environ and "HA_DUPLICATE_LOG_FILE" not in os.environ:
# Rename the default log file if it exists, since previous versions created
# it even on Supervisor
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")
def rename_old_file() -> None:
"""Rename old log file in executor."""
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")

await hass.async_add_executor_job(rename_old_file)
err_log_path = None
else:
err_log_path = default_log_path
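The bootstrap hunk above moves the blocking `os.rename` into an executor job so the rename never runs on the event loop. A minimal, framework-free sketch of that offloading pattern, assuming plain `asyncio` (the `rotate_log` helper and file name below are illustrative, not part of the Home Assistant code):

```python
import asyncio
import contextlib
import os


def rotate_log(path: str) -> None:
    """Blocking helper: rename an existing log file to <path>.old."""
    if os.path.isfile(path):
        with contextlib.suppress(OSError):
            os.rename(path, f"{path}.old")


async def main() -> None:
    # Run the blocking file operation in the default thread pool so the
    # event loop is never tied up by disk I/O.
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(None, rotate_log, "home-assistant.log")


asyncio.run(main())
```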
@@ -18,7 +18,7 @@ from .coordinator import (
ActronAirSystemCoordinator,
)

PLATFORM = [Platform.CLIMATE]
PLATFORMS = [Platform.CLIMATE, Platform.SWITCH]


async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
@@ -50,10 +50,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
system_coordinators=system_coordinators,
)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORM)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True


async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORM)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -148,7 +148,7 @@ class ActronSystemClimate(BaseClimateEntity):
@property
def fan_mode(self) -> str | None:
"""Return the current fan mode."""
fan_mode = self._status.user_aircon_settings.fan_mode
fan_mode = self._status.user_aircon_settings.base_fan_mode
return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode)

@property
30 homeassistant/components/actron_air/icons.json Normal file
@@ -0,0 +1,30 @@
{
  "entity": {
    "switch": {
      "away_mode": {
        "default": "mdi:home-export-outline",
        "state": {
          "off": "mdi:home-import-outline"
        }
      },
      "continuous_fan": {
        "default": "mdi:fan",
        "state": {
          "off": "mdi:fan-off"
        }
      },
      "quiet_mode": {
        "default": "mdi:volume-low",
        "state": {
          "off": "mdi:volume-high"
        }
      },
      "turbo_mode": {
        "default": "mdi:fan-plus",
        "state": {
          "off": "mdi:fan"
        }
      }
    }
  }
}
@@ -13,5 +13,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["actron-neo-api==0.2.0"]
  "requirements": ["actron-neo-api==0.4.1"]
}
@@ -32,6 +32,22 @@
      }
    }
  },
  "entity": {
    "switch": {
      "away_mode": {
        "name": "Away mode"
      },
      "continuous_fan": {
        "name": "Continuous fan"
      },
      "quiet_mode": {
        "name": "Quiet mode"
      },
      "turbo_mode": {
        "name": "Turbo mode"
      }
    }
  },
  "exceptions": {
    "auth_error": {
      "message": "Authentication failed, please reauthenticate"
110 homeassistant/components/actron_air/switch.py Normal file
@@ -0,0 +1,110 @@
"""Switch platform for Actron Air integration."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class ActronAirSwitchEntityDescription(SwitchEntityDescription):
    """Class describing Actron Air switch entities."""

    is_on_fn: Callable[[ActronAirSystemCoordinator], bool]
    set_fn: Callable[[ActronAirSystemCoordinator, bool], Awaitable[None]]
    is_supported_fn: Callable[[ActronAirSystemCoordinator], bool] = lambda _: True


SWITCHES: tuple[ActronAirSwitchEntityDescription, ...] = (
    ActronAirSwitchEntityDescription(
        key="away_mode",
        translation_key="away_mode",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.away_mode,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_away_mode(enabled),
    ),
    ActronAirSwitchEntityDescription(
        key="continuous_fan",
        translation_key="continuous_fan",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.continuous_fan_enabled,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_continuous_mode(enabled),
    ),
    ActronAirSwitchEntityDescription(
        key="quiet_mode",
        translation_key="quiet_mode",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.quiet_mode_enabled,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_quiet_mode(enabled),
    ),
    ActronAirSwitchEntityDescription(
        key="turbo_mode",
        translation_key="turbo_mode",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_enabled,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_turbo_mode(enabled),
        is_supported_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_supported,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ActronAirConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Actron Air switch entities."""
    system_coordinators = entry.runtime_data.system_coordinators
    async_add_entities(
        ActronAirSwitch(coordinator, description)
        for coordinator in system_coordinators.values()
        for description in SWITCHES
        if description.is_supported_fn(coordinator)
    )


class ActronAirSwitch(CoordinatorEntity[ActronAirSystemCoordinator], SwitchEntity):
    """Actron Air switch."""

    _attr_has_entity_name = True
    _attr_entity_category = EntityCategory.CONFIG
    entity_description: ActronAirSwitchEntityDescription

    def __init__(
        self,
        coordinator: ActronAirSystemCoordinator,
        description: ActronAirSwitchEntityDescription,
    ) -> None:
        """Initialize the switch."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.serial_number}_{description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, coordinator.serial_number)},
            manufacturer="Actron Air",
            name=coordinator.data.ac_system.system_name,
        )

    @property
    def is_on(self) -> bool:
        """Return true if the switch is on."""
        return self.entity_description.is_on_fn(self.coordinator)

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        await self.entity_description.set_fn(self.coordinator, True)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        await self.entity_description.set_fn(self.coordinator, False)
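The new `switch.py` above is entirely description-driven: each `ActronAirSwitchEntityDescription` carries `is_on_fn`/`set_fn` callables, and the single `ActronAirSwitch` class just forwards to them. A stripped-down sketch of the same pattern with a stand-in data object, outside Home Assistant (all names in this sketch are illustrative):

```python
import asyncio
from collections.abc import Awaitable, Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SwitchDescription:
    """Declarative description of one toggle: how to read it and how to set it."""

    key: str
    is_on_fn: Callable[[object], bool]
    set_fn: Callable[[object, bool], Awaitable[None]]


@dataclass
class FakeSettings:
    """Stand-in for the coordinator data used by the real integration."""

    away_mode: bool = False

    async def set_away_mode(self, enabled: bool) -> None:
        self.away_mode = enabled


AWAY = SwitchDescription(
    key="away_mode",
    is_on_fn=lambda data: data.away_mode,
    set_fn=lambda data, enabled: data.set_away_mode(enabled),
)


async def demo() -> None:
    data = FakeSettings()
    await AWAY.set_fn(data, True)  # what async_turn_on does
    print(AWAY.key, AWAY.is_on_fn(data))  # what the is_on property reads


asyncio.run(demo())
```

Adding another toggle in this style means adding one description, not one class.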
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant

from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
9 homeassistant/components/airobot/icons.json Normal file
@@ -0,0 +1,9 @@
{
  "entity": {
    "number": {
      "hysteresis_band": {
        "default": "mdi:delta"
      }
    }
  }
}
99
homeassistant/components/airobot/number.py
Normal file
99
homeassistant/components/airobot/number.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Number platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyairobotrest.const import HYSTERESIS_BAND_MAX, HYSTERESIS_BAND_MIN
|
||||
from pyairobotrest.exceptions import AirobotError
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotDataUpdateCoordinator
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirobotNumberEntityDescription(NumberEntityDescription):
|
||||
"""Describes Airobot number entity."""
|
||||
|
||||
value_fn: Callable[[AirobotDataUpdateCoordinator], float]
|
||||
set_value_fn: Callable[[AirobotDataUpdateCoordinator, float], Awaitable[None]]
|
||||
|
||||
|
||||
NUMBERS: tuple[AirobotNumberEntityDescription, ...] = (
|
||||
AirobotNumberEntityDescription(
|
||||
key="hysteresis_band",
|
||||
translation_key="hysteresis_band",
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
native_min_value=HYSTERESIS_BAND_MIN / 10.0,
|
||||
native_max_value=HYSTERESIS_BAND_MAX / 10.0,
|
||||
native_step=0.1,
|
||||
value_fn=lambda coordinator: coordinator.data.settings.hysteresis_band,
|
||||
set_value_fn=lambda coordinator, value: coordinator.client.set_hysteresis_band(
|
||||
value
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot number platform."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
AirobotNumber(coordinator, description) for description in NUMBERS
|
||||
)
|
||||
|
||||
|
||||
class AirobotNumber(AirobotEntity, NumberEntity):
|
||||
"""Representation of an Airobot number entity."""
|
||||
|
||||
entity_description: AirobotNumberEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
description: AirobotNumberEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the number entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.value_fn(self.coordinator)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set the value."""
|
||||
try:
|
||||
await self.entity_description.set_value_fn(self.coordinator, value)
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_value_failed",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
else:
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -48,7 +48,7 @@ rules:
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no dynamic device discovery needed.
|
||||
@@ -57,7 +57,7 @@ rules:
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
|
||||
@@ -44,6 +44,11 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"number": {
|
||||
"hysteresis_band": {
|
||||
"name": "Hysteresis band"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"air_temperature": {
|
||||
"name": "Air temperature"
|
||||
@@ -74,6 +79,9 @@
|
||||
},
|
||||
"set_temperature_failed": {
|
||||
"message": "Failed to set temperature to {temperature}."
|
||||
},
|
||||
"set_value_failed": {
|
||||
"message": "Failed to set value: {error}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,21 +88,11 @@ class AirPatrolClimate(AirPatrolEntity, ClimateEntity):
|
||||
super().__init__(coordinator, unit_id)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{unit_id}"
|
||||
|
||||
@property
|
||||
def climate_data(self) -> dict[str, Any]:
|
||||
"""Return the climate data."""
|
||||
return self.device_data.get("climate") or {}
|
||||
|
||||
@property
|
||||
def params(self) -> dict[str, Any]:
|
||||
"""Return the current parameters for the climate entity."""
|
||||
return self.climate_data.get("ParametersData") or {}
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and bool(self.climate_data)
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
|
||||
@@ -10,7 +10,7 @@ from homeassistant.const import Platform
|
||||
DOMAIN = "airpatrol"
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
PLATFORMS = [Platform.CLIMATE]
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
AIRPATROL_ERRORS = (AirPatrolAuthenticationError, AirPatrolError)
|
||||
|
||||
@@ -38,7 +38,17 @@ class AirPatrolEntity(CoordinatorEntity[AirPatrolDataUpdateCoordinator]):
|
||||
"""Return the device data."""
|
||||
return self.coordinator.data[self._unit_id]
|
||||
|
||||
@property
|
||||
def climate_data(self) -> dict[str, Any]:
|
||||
"""Return the climate data for this unit."""
|
||||
return self.device_data["climate"]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self._unit_id in self.coordinator.data
|
||||
return (
|
||||
super().available
|
||||
and self._unit_id in self.coordinator.data
|
||||
and "climate" in self.device_data
|
||||
and self.climate_data is not None
|
||||
)
|
||||
|
||||
89
homeassistant/components/airpatrol/sensor.py
Normal file
89
homeassistant/components/airpatrol/sensor.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""Sensors for AirPatrol integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirPatrolConfigEntry
|
||||
from .coordinator import AirPatrolDataUpdateCoordinator
|
||||
from .entity import AirPatrolEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirPatrolSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes AirPatrol sensor entity."""
|
||||
|
||||
data_field: str
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = (
|
||||
AirPatrolSensorEntityDescription(
|
||||
key="temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
data_field="RoomTemp",
|
||||
),
|
||||
AirPatrolSensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
data_field="RoomHumidity",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirPatrolConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up AirPatrol sensors."""
|
||||
coordinator = config_entry.runtime_data
|
||||
units = coordinator.data
|
||||
|
||||
async_add_entities(
|
||||
AirPatrolSensor(coordinator, unit_id, description)
|
||||
for unit_id, unit in units.items()
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
if "climate" in unit and unit["climate"] is not None
|
||||
)
|
||||
|
||||
|
||||
class AirPatrolSensor(AirPatrolEntity, SensorEntity):
|
||||
"""AirPatrol sensor entity."""
|
||||
|
||||
entity_description: AirPatrolSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirPatrolDataUpdateCoordinator,
|
||||
unit_id: str,
|
||||
description: AirPatrolSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize AirPatrol sensor."""
|
||||
super().__init__(coordinator, unit_id)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator.config_entry.unique_id}-{unit_id}-{description.key}"
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
if value := self.climate_data.get(self.entity_description.data_field):
|
||||
return float(value)
|
||||
return None
|
||||
@@ -4,10 +4,10 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityStateTriggerBase,
|
||||
EntityTargetStateTriggerBase,
|
||||
Trigger,
|
||||
make_conditional_entity_state_trigger,
|
||||
make_entity_state_trigger,
|
||||
make_entity_target_state_trigger,
|
||||
make_entity_transition_trigger,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
|
||||
@@ -21,7 +21,7 @@ def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool
|
||||
return False
|
||||
|
||||
|
||||
class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
|
||||
class EntityStateTriggerRequiredFeatures(EntityTargetStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_required_features: int
|
||||
@@ -38,21 +38,21 @@ class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
|
||||
|
||||
def make_entity_state_trigger_required_features(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityStateTriggerBase]:
|
||||
) -> type[EntityTargetStateTriggerBase]:
|
||||
"""Create an entity state trigger class."""
|
||||
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domain = domain
|
||||
_to_state = to_state
|
||||
_to_states = {to_state}
|
||||
_required_features = required_features
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"armed": make_conditional_entity_state_trigger(
|
||||
"armed": make_entity_transition_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
AlarmControlPanelState.ARMING,
|
||||
@@ -89,8 +89,12 @@ TRIGGERS: dict[str, type[Trigger]] = {
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
|
||||
"triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
|
||||
"disarmed": make_entity_target_state_trigger(
|
||||
DOMAIN, AlarmControlPanelState.DISARMED
|
||||
),
|
||||
"triggered": make_entity_target_state_trigger(
|
||||
DOMAIN, AlarmControlPanelState.TRIGGERED
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
||||
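The alarm control panel triggers above are produced by factory helpers that return a configured subclass (class attributes such as `_domain`, `_to_states`, `_required_features`) rather than instances. A rough, self-contained sketch of that class-factory idea; it only mirrors the shape of `make_entity_state_trigger_required_features`, not the real helper API:

```python
class StateTriggerBase:
    """Base class; concrete triggers configure it via class attributes."""

    _domain: str
    _to_states: set[str]
    _required_features: int = 0

    @classmethod
    def describe(cls) -> str:
        return f"{cls._domain}: to {sorted(cls._to_states)} (features={cls._required_features})"


def make_state_trigger(
    domain: str, to_state: str, required_features: int = 0
) -> type[StateTriggerBase]:
    """Build a concrete trigger class instead of writing each subclass by hand."""

    class CustomTrigger(StateTriggerBase):
        _domain = domain
        _to_states = {to_state}
        _required_features = required_features

    return CustomTrigger


DisarmedTrigger = make_state_trigger("alarm_control_panel", "disarmed")
print(DisarmedTrigger.describe())
```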
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
@@ -30,14 +30,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_PASSWORD): selector.TextSelector(
|
||||
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
|
||||
),
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_credentials(
|
||||
auth: MSOB2CAuth, account_number: str
|
||||
) -> str | MSOB2CAuth:
|
||||
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
|
||||
"""Validate the provided credentials."""
|
||||
try:
|
||||
await auth.send_login_request()
|
||||
@@ -46,6 +43,33 @@ async def validate_credentials(
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return "unknown"
|
||||
return auth
|
||||
|
||||
|
||||
def humanize_account_data(account: dict) -> str:
|
||||
"""Convert an account data into a human-readable format."""
|
||||
if account["address"]["company_name"] != "":
|
||||
return f"{account['account_number']} - {account['address']['company_name']}"
|
||||
if account["address"]["building_name"] != "":
|
||||
return f"{account['account_number']} - {account['address']['building_name']}"
|
||||
return f"{account['account_number']} - {account['address']['postcode']}"
|
||||
|
||||
|
||||
async def get_accounts(auth: MSOB2CAuth) -> list[selector.SelectOptionDict]:
|
||||
"""Retrieve the list of accounts associated with the authenticated user."""
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
accounts = await _aw.api.get_associated_accounts()
|
||||
return [
|
||||
selector.SelectOptionDict(
|
||||
value=str(account["account_number"]),
|
||||
label=humanize_account_data(account),
|
||||
)
|
||||
for account in accounts["result"]["active"]
|
||||
]
|
||||
|
||||
|
||||
async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2CAuth:
|
||||
"""Validate the provided account number."""
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
try:
|
||||
await _aw.validate_smart_meter(account_number)
|
||||
@@ -57,36 +81,91 @@ async def validate_credentials(
|
||||
class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anglian Water."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self.authenticator: MSOB2CAuth | None = None
|
||||
self.accounts: list[selector.SelectOptionDict] = []
|
||||
self.user_input: dict[str, Any] | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
validation_response = await validate_credentials(
|
||||
MSOB2CAuth(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
self.authenticator = MSOB2CAuth(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
user_input[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
validation_response = await validate_credentials(self.authenticator)
|
||||
if isinstance(validation_response, str):
|
||||
errors["base"] = validation_response
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_ACCOUNT_NUMBER],
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ACCESS_TOKEN: validation_response.refresh_token,
|
||||
},
|
||||
self.accounts = await get_accounts(self.authenticator)
|
||||
if len(self.accounts) > 1:
|
||||
self.user_input = user_input
|
||||
return await self.async_step_select_account()
|
||||
account_number = self.accounts[0]["value"]
|
||||
self.user_input = user_input
|
||||
return await self.async_step_complete(
|
||||
{
|
||||
CONF_ACCOUNT_NUMBER: account_number,
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_select_account(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the account selection step."""
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
if TYPE_CHECKING:
|
||||
assert self.authenticator
|
||||
validation_result = await validate_account(
|
||||
self.authenticator,
|
||||
user_input[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
if isinstance(validation_result, str):
|
||||
errors["base"] = validation_result
|
||||
else:
|
||||
return await self.async_step_complete(user_input)
|
||||
return self.async_show_form(
|
||||
step_id="select_account",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.SelectSelector(
|
||||
selector.SelectSelectorConfig(
|
||||
options=self.accounts,
|
||||
multiple=False,
|
||||
mode=selector.SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_complete(self, user_input: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle the final configuration step."""
|
||||
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
|
||||
self._abort_if_unique_id_configured()
|
||||
if TYPE_CHECKING:
|
||||
assert self.authenticator
|
||||
assert self.user_input
|
||||
config_entry_data = {
|
||||
**self.user_input,
|
||||
CONF_ACCOUNT_NUMBER: user_input[CONF_ACCOUNT_NUMBER],
|
||||
CONF_ACCESS_TOKEN: self.authenticator.refresh_token,
|
||||
}
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_ACCOUNT_NUMBER],
|
||||
data=config_entry_data,
|
||||
)
|
||||
|
||||
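The config flow above adds `humanize_account_data`, which labels an account by company name, then building name, then postcode. A quick standalone check of that fallback order (the function is copied from the hunk; the sample dict is made up):

```python
def humanize_account_data(account: dict) -> str:
    """Copy of the label helper from the config flow above."""
    if account["address"]["company_name"] != "":
        return f"{account['account_number']} - {account['address']['company_name']}"
    if account["address"]["building_name"] != "":
        return f"{account['account_number']} - {account['address']['building_name']}"
    return f"{account['account_number']} - {account['address']['postcode']}"


# Hypothetical account record: no company or building name, so the label
# falls through to the postcode.
sample = {
    "account_number": "12345678",
    "address": {"company_name": "", "building_name": "", "postcode": "PE29 6XU"},
}
print(humanize_account_data(sample))  # "12345678 - PE29 6XU"
```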
@@ -4,13 +4,28 @@ from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.exceptions import ExpiredAccessTokenError, UnknownEndpointError
|
||||
|
||||
from homeassistant.components.recorder import get_instance
|
||||
from homeassistant.components.recorder.models import (
|
||||
StatisticData,
|
||||
StatisticMeanType,
|
||||
StatisticMetaData,
|
||||
)
|
||||
from homeassistant.components.recorder.statistics import (
|
||||
async_add_external_statistics,
|
||||
get_last_statistics,
|
||||
statistics_during_period,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfVolume
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import VolumeConverter
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
|
||||
@@ -44,6 +59,107 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Update data from Anglian Water's API."""
|
||||
try:
|
||||
return await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
|
||||
await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
|
||||
await self._insert_statistics()
|
||||
except (ExpiredAccessTokenError, UnknownEndpointError) as err:
|
||||
raise UpdateFailed from err
|
||||
|
||||
async def _insert_statistics(self) -> None:
|
||||
"""Insert statistics for water meters into Home Assistant."""
|
||||
for meter in self.api.meters.values():
|
||||
id_prefix = (
|
||||
f"{self.config_entry.data[CONF_ACCOUNT_NUMBER]}_{meter.serial_number}"
|
||||
)
|
||||
usage_statistic_id = f"{DOMAIN}:{id_prefix}_usage".lower()
|
||||
_LOGGER.debug("Updating statistics for meter %s", meter.serial_number)
|
||||
name_prefix = (
|
||||
f"Anglian Water {self.config_entry.data[CONF_ACCOUNT_NUMBER]} "
|
||||
f"{meter.serial_number}"
|
||||
)
|
||||
usage_metadata = StatisticMetaData(
|
||||
mean_type=StatisticMeanType.NONE,
|
||||
has_sum=True,
|
||||
name=f"{name_prefix} Usage",
|
||||
source=DOMAIN,
|
||||
statistic_id=usage_statistic_id,
|
||||
unit_class=VolumeConverter.UNIT_CLASS,
|
||||
unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
)
|
||||
last_stat = await get_instance(self.hass).async_add_executor_job(
|
||||
get_last_statistics, self.hass, 1, usage_statistic_id, True, set()
|
||||
)
|
||||
if not last_stat:
|
||||
_LOGGER.debug("Updating statistics for the first time")
|
||||
usage_sum = 0.0
|
||||
last_stats_time = None
|
||||
else:
|
||||
if not meter.readings or len(meter.readings) == 0:
|
||||
_LOGGER.debug("No recent usage statistics found, skipping update")
|
||||
continue
|
||||
# Anglian Water stats are hourly, the read_at time is the time that the meter took the reading
|
||||
# We remove 1 hour from this so that the data is shown in the correct hour on the dashboards
|
||||
parsed_read_at = dt_util.parse_datetime(meter.readings[0]["read_at"])
|
||||
if not parsed_read_at:
|
||||
_LOGGER.debug(
|
||||
"Could not parse read_at time %s, skipping update",
|
||||
meter.readings[0]["read_at"],
|
||||
)
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
_LOGGER.debug("Getting statistics at %s", start)
|
||||
for end in (start + timedelta(seconds=1), None):
|
||||
stats = await get_instance(self.hass).async_add_executor_job(
|
||||
statistics_during_period,
|
||||
self.hass,
|
||||
start,
|
||||
end,
|
||||
{
|
||||
usage_statistic_id,
|
||||
},
|
||||
"hour",
|
||||
None,
|
||||
{"sum"},
|
||||
)
|
||||
if stats:
|
||||
break
|
||||
if end:
|
||||
_LOGGER.debug(
|
||||
"Not found, trying to find oldest statistic after %s",
|
||||
start,
|
||||
)
|
||||
assert stats
|
||||
|
||||
def _safe_get_sum(records: list[Any]) -> float:
|
||||
if records and "sum" in records[0]:
|
||||
return float(records[0]["sum"])
|
||||
return 0.0
|
||||
|
||||
usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
|
||||
last_stats_time = stats[usage_statistic_id][0]["start"]
|
||||
|
||||
usage_statistics = []
|
||||
|
||||
for read in meter.readings:
|
||||
parsed_read_at = dt_util.parse_datetime(read["read_at"])
|
||||
if not parsed_read_at:
|
||||
_LOGGER.debug(
|
||||
"Could not parse read_at time %s, skipping reading",
|
||||
read["read_at"],
|
||||
)
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
if last_stats_time is not None and start.timestamp() <= last_stats_time:
|
||||
continue
|
||||
usage_state = max(0, read["consumption"] / 1000)
|
||||
usage_sum = max(0, read["read"])
|
||||
usage_statistics.append(
|
||||
StatisticData(
|
||||
start=start,
|
||||
state=usage_state,
|
||||
sum=usage_sum,
|
||||
)
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Adding %s statistics for %s", len(usage_statistics), usage_statistic_id
|
||||
)
|
||||
async_add_external_statistics(self.hass, usage_metadata, usage_statistics)
|
||||
|
||||
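The `_insert_statistics` changes above turn each meter reading into one statistics point: the state is `consumption / 1000` (which appears to convert litre readings to the cubic-metre unit declared in the metadata), the running sum is the meter `read`, and the start time is `read_at` shifted back one hour so the value lands in the hour it covers. A plain-Python sketch of just that arithmetic with made-up readings, no recorder involved:

```python
from datetime import datetime, timedelta

# Made-up readings in the shape used above: consumption for the hour,
# cumulative meter read, and the time the meter reported the reading.
readings = [
    {"read_at": "2025-12-23T10:00:00+00:00", "consumption": 12.0, "read": 431.208},
    {"read_at": "2025-12-23T11:00:00+00:00", "consumption": 7.5, "read": 431.2155},
]

points = []
for read in readings:
    read_at = datetime.fromisoformat(read["read_at"])
    points.append(
        {
            # Shift back one hour so the value lands in the hour it covers.
            "start": read_at - timedelta(hours=1),
            "state": max(0, read["consumption"] / 1000),
            "sum": max(0, read["read"]),
        }
    )

for point in points:
    print(point["start"].isoformat(), point["state"], point["sum"])
```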
@@ -1,6 +1,7 @@
{
  "domain": "anglian_water",
  "name": "Anglian Water",
  "after_dependencies": ["recorder"],
  "codeowners": ["@pantherale0"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/anglian_water",
@@ -10,14 +10,21 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"select_account": {
|
||||
"data": {
|
||||
"account_number": "Billing account number"
|
||||
},
|
||||
"data_description": {
|
||||
"account_number": "Select the billing account you wish to use."
|
||||
},
|
||||
"description": "Multiple active billing accounts were found with your credentials. Please select the account you wish to use. If this is unexpected, contact Anglian Water to confirm your active accounts."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"account_number": "Billing Account Number",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"account_number": "Your account number found on your latest bill.",
|
||||
"password": "Your password",
|
||||
"username": "Username or email used to log in to the Anglian Water website."
|
||||
},
|
||||
|
||||
@@ -1,16 +1,22 @@
|
||||
"""Provides triggers for assist satellites."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entity import AssistSatelliteState
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
|
||||
"listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
|
||||
"processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
|
||||
"responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
|
||||
"idle": make_entity_target_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
|
||||
"listening": make_entity_target_state_trigger(
|
||||
DOMAIN, AssistSatelliteState.LISTENING
|
||||
),
|
||||
"processing": make_entity_target_state_trigger(
|
||||
DOMAIN, AssistSatelliteState.PROCESSING
|
||||
),
|
||||
"responding": make_entity_target_state_trigger(
|
||||
DOMAIN, AssistSatelliteState.RESPONDING
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -30,5 +30,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.4"]
}
@@ -6,5 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/autarco",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["autarco==3.2.0"]
}
@@ -27,6 +27,7 @@ from homeassistant.const import (
|
||||
CONF_EVENT_DATA,
|
||||
CONF_ID,
|
||||
CONF_MODE,
|
||||
CONF_OPTIONS,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORM,
|
||||
CONF_TRIGGERS,
|
||||
@@ -130,9 +131,13 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
|
||||
"cover",
|
||||
"device_tracker",
|
||||
"fan",
|
||||
"humidifier",
|
||||
"lawn_mower",
|
||||
"light",
|
||||
"lock",
|
||||
"media_player",
|
||||
"scene",
|
||||
"siren",
|
||||
"switch",
|
||||
"text",
|
||||
"update",
|
||||
@@ -1214,7 +1219,7 @@ def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
|
||||
return trigger_conf[CONF_ENTITY_ID] # type: ignore[no-any-return]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "calendar":
|
||||
return [trigger_conf[CONF_ENTITY_ID]]
|
||||
return [trigger_conf[CONF_OPTIONS][CONF_ENTITY_ID]]
|
||||
|
||||
if trigger_conf[CONF_PLATFORM] == "zone":
|
||||
return trigger_conf[CONF_ENTITY_ID] + [trigger_conf[CONF_ZONE]] # type: ignore[no-any-return]
|
||||
|
||||
@@ -29,7 +29,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["axis"],
  "requirements": ["axis==65"],
  "requirements": ["axis==66"],
  "ssdp": [
    {
      "manufacturer": "AXIS"
@@ -12,7 +12,7 @@ from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import BeoConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .util import get_device_buttons
|
||||
from .util import get_device_buttons, get_remote_keys, get_remotes
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -53,4 +53,23 @@ async def async_get_config_entry_diagnostics(
|
||||
state_dict.pop("context")
|
||||
data[f"{device_button}_event"] = state_dict
|
||||
|
||||
# Get remotes
|
||||
for remote in await get_remotes(config_entry.runtime_data.client):
|
||||
# Get key Event entity states (if enabled)
|
||||
for key_type in get_remote_keys():
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
EVENT_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{remote.serial_number}_{config_entry.unique_id}_{key_type}",
|
||||
):
|
||||
if state := hass.states.get(entity_id):
|
||||
state_dict = dict(state.as_dict())
|
||||
|
||||
# Remove context as it is not relevant
|
||||
state_dict.pop("context")
|
||||
data[f"remote_{remote.serial_number}_{key_type}_event"] = state_dict
|
||||
|
||||
# Add remote Mozart model
|
||||
data[f"remote_{remote.serial_number}"] = dict(remote)
|
||||
|
||||
return data
|
||||
|
||||
@@ -16,11 +16,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BeoConfigEntry
|
||||
from .const import (
|
||||
BEO_REMOTE_CONTROL_KEYS,
|
||||
BEO_REMOTE_KEY_EVENTS,
|
||||
BEO_REMOTE_KEYS,
|
||||
BEO_REMOTE_SUBMENU_CONTROL,
|
||||
BEO_REMOTE_SUBMENU_LIGHT,
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DOMAIN,
|
||||
@@ -29,7 +25,7 @@ from .const import (
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BeoEntity
|
||||
from .util import get_device_buttons, get_remotes
|
||||
from .util import get_device_buttons, get_remote_keys, get_remotes
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -40,38 +36,19 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Event entities from config entry."""
|
||||
entities: list[BeoEvent] = []
|
||||
|
||||
async_add_entities(
|
||||
entities: list[BeoEvent] = [
|
||||
BeoButtonEvent(config_entry, button_type)
|
||||
for button_type in get_device_buttons(config_entry.data[CONF_MODEL])
|
||||
)
|
||||
]
|
||||
|
||||
# Check for connected Beoremote One
|
||||
remotes = await get_remotes(config_entry.runtime_data.client)
|
||||
|
||||
for remote in remotes:
|
||||
# Add Light keys
|
||||
entities.extend(
|
||||
[
|
||||
BeoRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}",
|
||||
)
|
||||
for key_type in BEO_REMOTE_KEYS
|
||||
]
|
||||
)
|
||||
|
||||
# Add Control keys
|
||||
entities.extend(
|
||||
[
|
||||
BeoRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}",
|
||||
)
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
BeoRemoteKeyEvent(config_entry, remote, key_type)
|
||||
for key_type in get_remote_keys()
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -11,7 +11,16 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .const import DEVICE_BUTTONS, DOMAIN, BeoButtons, BeoModel
|
||||
from .const import (
|
||||
BEO_REMOTE_CONTROL_KEYS,
|
||||
BEO_REMOTE_KEYS,
|
||||
BEO_REMOTE_SUBMENU_CONTROL,
|
||||
BEO_REMOTE_SUBMENU_LIGHT,
|
||||
DEVICE_BUTTONS,
|
||||
DOMAIN,
|
||||
BeoButtons,
|
||||
BeoModel,
|
||||
)
|
||||
|
||||
|
||||
def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
@@ -64,3 +73,14 @@ def get_device_buttons(model: BeoModel) -> list[str]:
|
||||
buttons.remove(BeoButtons.BLUETOOTH)
|
||||
|
||||
return buttons
|
||||
|
||||
|
||||
def get_remote_keys() -> list[str]:
|
||||
"""Get remote keys for the Beoremote One. Formatted for Home Assistant use."""
|
||||
return [
|
||||
*[f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}" for key_type in BEO_REMOTE_KEYS],
|
||||
*[
|
||||
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}"
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
],
|
||||
]
|
||||
|
||||
@@ -4,7 +4,7 @@ from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.trigger import EntityStateTriggerBase, Trigger
|
||||
from homeassistant.helpers.trigger import EntityTargetStateTriggerBase, Trigger
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
|
||||
from . import DOMAIN, BinarySensorDeviceClass
|
||||
@@ -20,7 +20,7 @@ def get_device_class_or_undefined(
|
||||
return UNDEFINED
|
||||
|
||||
|
||||
class BinarySensorOnOffTrigger(EntityStateTriggerBase):
|
||||
class BinarySensorOnOffTrigger(EntityTargetStateTriggerBase):
|
||||
"""Class for binary sensor on/off triggers."""
|
||||
|
||||
_device_class: BinarySensorDeviceClass | None
|
||||
@@ -47,7 +47,7 @@ def make_binary_sensor_trigger(
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_device_class = device_class
|
||||
_to_state = to_state
|
||||
_to_states = {to_state}
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
  "domain": "blackbird",
  "name": "Monoprice Blackbird Matrix Switch",
  "codeowners": [],
  "disabled": "This integration is disabled because it references pyserial-asyncio, which does blocking I/O in the asyncio loop and is not maintained.",
  "documentation": "https://www.home-assistant.io/integrations/blackbird",
  "iot_class": "local_polling",
  "loggers": ["pyblackbird"],
@@ -25,6 +25,7 @@ from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
entity_platform,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.device_registry import (
|
||||
@@ -42,7 +43,12 @@ from homeassistant.util import dt as dt_util, slugify
|
||||
|
||||
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
|
||||
from .coordinator import BluesoundCoordinator
|
||||
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
|
||||
from .utils import (
|
||||
dispatcher_join_signal,
|
||||
dispatcher_unjoin_signal,
|
||||
format_unique_id,
|
||||
id_to_paired_player,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import BluesoundConfigEntry
|
||||
@@ -83,9 +89,11 @@ async def async_setup_entry(
|
||||
SERVICE_CLEAR_TIMER, None, "async_clear_timer"
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join"
|
||||
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_bluesound_join"
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_UNJOIN, None, "async_bluesound_unjoin"
|
||||
)
|
||||
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")
|
||||
|
||||
async_add_entities([bluesound_player], update_before_add=True)
|
||||
|
||||
@@ -120,6 +128,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
self._presets: list[Preset] = coordinator.data.presets
|
||||
self._group_name: str | None = None
|
||||
self._group_list: list[str] = []
|
||||
self._group_members: list[str] | None = None
|
||||
self._bluesound_device_name = sync_status.name
|
||||
self._player = player
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
@@ -180,6 +189,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
self._last_status_update = dt_util.utcnow()
|
||||
|
||||
self._group_list = self.rebuild_bluesound_group()
|
||||
self._group_members = self.rebuild_group_members()
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -365,11 +375,13 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
MediaPlayerEntityFeature.VOLUME_STEP
|
||||
| MediaPlayerEntityFeature.VOLUME_SET
|
||||
| MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
| MediaPlayerEntityFeature.GROUPING
|
||||
)
|
||||
|
||||
supported = (
|
||||
MediaPlayerEntityFeature.CLEAR_PLAYLIST
|
||||
| MediaPlayerEntityFeature.BROWSE_MEDIA
|
||||
| MediaPlayerEntityFeature.GROUPING
|
||||
)
|
||||
|
||||
if not self._status.indexing:
|
||||
@@ -421,8 +433,57 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
|
||||
return shuffle
|
||||
|
||||
async def async_join(self, master: str) -> None:
|
||||
@property
|
||||
def group_members(self) -> list[str] | None:
|
||||
"""Get list of group members. Leader is always first."""
|
||||
return self._group_members
|
||||
|
||||
async def async_join_players(self, group_members: list[str]) -> None:
|
||||
"""Join `group_members` as a player group with the current player."""
|
||||
if self.entity_id in group_members:
|
||||
raise ServiceValidationError("Cannot join player to itself")
|
||||
|
||||
entity_ids_with_sync_status = self._entity_ids_with_sync_status()
|
||||
|
||||
paired_players = []
|
||||
for group_member in group_members:
|
||||
sync_status = entity_ids_with_sync_status.get(group_member)
|
||||
if sync_status is None:
|
||||
continue
|
||||
paired_player = id_to_paired_player(sync_status.id)
|
||||
if paired_player:
|
||||
paired_players.append(paired_player)
|
||||
|
||||
if paired_players:
|
||||
await self._player.add_followers(paired_players)
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Remove this player from any group."""
|
||||
if self._sync_status.leader is not None:
|
||||
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
|
||||
async_dispatcher_send(
|
||||
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
|
||||
)
|
||||
|
||||
if self._sync_status.followers is not None:
|
||||
await self._player.remove_follower(self.host, self.port)
|
||||
|
||||
async def async_bluesound_join(self, master: str) -> None:
|
||||
"""Join the player to a group."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_service_{SERVICE_JOIN}",
|
||||
is_fixable=False,
|
||||
breaks_in_ha_version="2026.7.0",
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_service_join",
|
||||
translation_placeholders={
|
||||
"name": slugify(self.sync_status.name),
|
||||
},
|
||||
)
|
||||
|
||||
if master == self.entity_id:
|
||||
raise ServiceValidationError("Cannot join player to itself")
|
||||
|
||||
@@ -431,18 +492,24 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
self.hass, dispatcher_join_signal(master), self.host, self.port
|
||||
)
|
||||
|
||||
async def async_unjoin(self) -> None:
|
||||
async def async_bluesound_unjoin(self) -> None:
|
||||
"""Unjoin the player from a group."""
|
||||
if self._sync_status.leader is None:
|
||||
return
|
||||
|
||||
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
|
||||
|
||||
_LOGGER.debug("Trying to unjoin player: %s", self.id)
|
||||
async_dispatcher_send(
|
||||
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_service_{SERVICE_UNJOIN}",
|
||||
is_fixable=False,
|
||||
breaks_in_ha_version="2026.7.0",
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_service_unjoin",
|
||||
translation_placeholders={
|
||||
"name": slugify(self.sync_status.name),
|
||||
},
|
||||
)
|
||||
|
||||
await self.async_unjoin_player()
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""List members in group."""
|
||||
@@ -488,6 +555,63 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
follower_names.insert(0, leader_sync_status.name)
|
||||
return follower_names
|
||||
|
||||
def rebuild_group_members(self) -> list[str] | None:
|
||||
"""Get list of group members. Leader is always first."""
|
||||
if self.sync_status.leader is None and self.sync_status.followers is None:
|
||||
return None
|
||||
|
||||
entity_ids_with_sync_status = self._entity_ids_with_sync_status()
|
||||
|
||||
leader_entity_id = None
|
||||
followers = None
|
||||
if self.sync_status.followers is not None:
|
||||
leader_entity_id = self.entity_id
|
||||
followers = self.sync_status.followers
|
||||
elif self.sync_status.leader is not None:
|
||||
leader_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
|
||||
for entity_id, sync_status in entity_ids_with_sync_status.items():
|
||||
if sync_status.id == leader_id:
|
||||
leader_entity_id = entity_id
|
||||
followers = sync_status.followers
|
||||
break
|
||||
|
||||
if leader_entity_id is None or followers is None:
|
||||
return None
|
||||
|
||||
grouped_entity_ids = [leader_entity_id]
|
||||
for follower in followers:
|
||||
follower_id = f"{follower.ip}:{follower.port}"
|
||||
entity_ids = [
|
||||
entity_id
|
||||
for entity_id, sync_status in entity_ids_with_sync_status.items()
|
||||
if sync_status.id == follower_id
|
||||
]
|
||||
match entity_ids:
|
||||
case [entity_id]:
|
||||
grouped_entity_ids.append(entity_id)
|
||||
|
||||
return grouped_entity_ids
|
||||
|
||||
def _entity_ids_with_sync_status(self) -> dict[str, SyncStatus]:
|
||||
result = {}
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
|
||||
config_entries: list[BluesoundConfigEntry] = (
|
||||
self.hass.config_entries.async_entries(DOMAIN)
|
||||
)
|
||||
for config_entry in config_entries:
|
||||
entity_entries = er.async_entries_for_config_entry(
|
||||
entity_registry, config_entry.entry_id
|
||||
)
|
||||
for entity_entry in entity_entries:
|
||||
if entity_entry.domain == "media_player":
|
||||
result[entity_entry.entity_id] = (
|
||||
config_entry.runtime_data.coordinator.data.sync_status
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
async def async_add_follower(self, host: str, port: int) -> None:
|
||||
"""Add follower to leader."""
|
||||
await self._player.add_follower(host, port)
|
||||
|
||||
@@ -41,9 +41,17 @@
|
||||
"description": "Use `button.{name}_clear_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.clear_sleep_timer"
|
||||
},
|
||||
"deprecated_service_join": {
|
||||
"description": "Use the `media_player.join` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.join"
|
||||
},
|
||||
"deprecated_service_set_sleep_timer": {
|
||||
"description": "Use `button.{name}_set_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.set_sleep_timer"
|
||||
},
|
||||
"deprecated_service_unjoin": {
|
||||
"description": "Use the `media_player.unjoin` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
|
||||
"title": "Detected use of deprecated action bluesound.unjoin"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""Utility functions for the Bluesound component."""
|
||||
|
||||
from pyblu import PairedPlayer
|
||||
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
|
||||
@@ -19,3 +21,12 @@ def dispatcher_unjoin_signal(leader_id: str) -> str:
|
||||
Id is ip_address:port. This can be obtained from sync_status.id.
|
||||
"""
|
||||
return f"bluesound_unjoin_{leader_id}"
|
||||
|
||||
|
||||
def id_to_paired_player(id: str) -> PairedPlayer | None:
|
||||
"""Try to convert id in format 'ip:port' to PairedPlayer. Returns None if unable to do so."""
|
||||
match id.rsplit(":", 1):
|
||||
case [str() as ip, str() as port] if port.isdigit():
|
||||
return PairedPlayer(ip, int(port))
|
||||
case _:
|
||||
return None
|
||||
|
||||
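A short usage sketch for the new id_to_paired_player helper, assuming pyblu's PairedPlayer takes ip and port positionally as in the function above:

# Sketch only: shows the accepted "ip:port" format and the None fallback.
player = id_to_paired_player("192.168.1.10:11000")
# player is a PairedPlayer with ip "192.168.1.10" and port 11000
assert id_to_paired_player("192.168.1.10") is None  # no port -> returns None
assert id_to_paired_player("host:abc") is None      # non-numeric port -> returns None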
@@ -2,29 +2,30 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable, Coroutine
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_PLATFORM
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_OPTIONS
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_point_in_time,
|
||||
async_track_time_interval,
|
||||
)
|
||||
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import CalendarEntity, CalendarEvent
|
||||
from .const import DATA_COMPONENT, DOMAIN
|
||||
from .const import DATA_COMPONENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -32,13 +33,17 @@ EVENT_START = "start"
|
||||
EVENT_END = "end"
|
||||
UPDATE_INTERVAL = datetime.timedelta(minutes=15)
|
||||
|
||||
TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(
|
||||
|
||||
_OPTIONS_SCHEMA_DICT = {
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
|
||||
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
|
||||
}
|
||||
|
||||
_CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): DOMAIN,
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
|
||||
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
|
||||
}
|
||||
vol.Required(CONF_OPTIONS): _OPTIONS_SCHEMA_DICT,
|
||||
},
|
||||
)
|
||||
|
||||
# mypy: disallow-any-generics
|
||||
@@ -169,14 +174,14 @@ class CalendarEventListener:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
job: HassJob[..., Coroutine[Any, Any, None] | Any],
|
||||
trigger_data: dict[str, Any],
|
||||
action_runner: TriggerActionRunner,
|
||||
trigger_payload: dict[str, Any],
|
||||
fetcher: QueuedEventFetcher,
|
||||
) -> None:
|
||||
"""Initialize CalendarEventListener."""
|
||||
self._hass = hass
|
||||
self._job = job
|
||||
self._trigger_data = trigger_data
|
||||
self._action_runner = action_runner
|
||||
self._trigger_payload = trigger_payload
|
||||
self._unsub_event: CALLBACK_TYPE | None = None
|
||||
self._unsub_refresh: CALLBACK_TYPE | None = None
|
||||
self._fetcher = fetcher
|
||||
@@ -233,15 +238,11 @@ class CalendarEventListener:
|
||||
while self._events and self._events[0].trigger_time <= now:
|
||||
queued_event = self._events.pop(0)
|
||||
_LOGGER.debug("Dispatching event: %s", queued_event.event)
|
||||
self._hass.async_run_hass_job(
|
||||
self._job,
|
||||
{
|
||||
"trigger": {
|
||||
**self._trigger_data,
|
||||
"calendar_event": queued_event.event.as_dict(),
|
||||
}
|
||||
},
|
||||
)
|
||||
payload = {
|
||||
**self._trigger_payload,
|
||||
"calendar_event": queued_event.event.as_dict(),
|
||||
}
|
||||
self._action_runner(payload, "calendar event state change")
|
||||
|
||||
async def _handle_refresh(self, now_utc: datetime.datetime) -> None:
|
||||
"""Handle core config update."""
|
||||
@@ -259,31 +260,69 @@ class CalendarEventListener:
|
||||
self._listen_next_calendar_event()
|
||||
|
||||
|
||||
async def async_attach_trigger(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
action: TriggerActionType,
|
||||
trigger_info: TriggerInfo,
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach trigger for the specified calendar."""
|
||||
entity_id = config[CONF_ENTITY_ID]
|
||||
event_type = config[CONF_EVENT]
|
||||
offset = config[CONF_OFFSET]
|
||||
class EventTrigger(Trigger):
|
||||
"""Calendar event trigger."""
|
||||
|
||||
# Validate the entity id is valid
|
||||
get_entity(hass, entity_id)
|
||||
_options: dict[str, Any]
|
||||
|
||||
trigger_data = {
|
||||
**trigger_info["trigger_data"],
|
||||
"platform": DOMAIN,
|
||||
"event": event_type,
|
||||
"offset": offset,
|
||||
}
|
||||
listener = CalendarEventListener(
|
||||
hass,
|
||||
HassJob(action),
|
||||
trigger_data,
|
||||
queued_event_fetcher(event_fetcher(hass, entity_id), event_type, offset),
|
||||
)
|
||||
await listener.async_attach()
|
||||
return listener.async_detach
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = move_top_level_schema_fields_to_options(
|
||||
complete_config, _OPTIONS_SCHEMA_DICT
|
||||
)
|
||||
return await super().async_validate_complete_config(hass, complete_config)
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, _CONFIG_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert config.options is not None
|
||||
self._options = config.options
|
||||
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
|
||||
entity_id = self._options[CONF_ENTITY_ID]
|
||||
event_type = self._options[CONF_EVENT]
|
||||
offset = self._options[CONF_OFFSET]
|
||||
|
||||
# Validate the entity id is valid
|
||||
get_entity(self._hass, entity_id)
|
||||
|
||||
trigger_data = {
|
||||
"event": event_type,
|
||||
"offset": offset,
|
||||
}
|
||||
listener = CalendarEventListener(
|
||||
self._hass,
|
||||
run_action,
|
||||
trigger_data,
|
||||
queued_event_fetcher(
|
||||
event_fetcher(self._hass, entity_id), event_type, offset
|
||||
),
|
||||
)
|
||||
await listener.async_attach()
|
||||
return listener.async_detach
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"_": EventTrigger,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for calendars."""
|
||||
return TRIGGERS
|
||||
|
||||
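As a hedged illustration of the validation path above: async_validate_complete_config moves the legacy top-level fields under options before _CONFIG_SCHEMA runs, so the flat and nested forms below are treated as equivalent (the calendar entity id is made up; the exact migrated shape is an assumption based on the schema in this hunk):

# Legacy, flat trigger config (pre-migration), with a hypothetical entity id.
legacy = {"platform": "calendar", "entity_id": "calendar.holidays", "event": "start"}
# Expected shape after move_top_level_schema_fields_to_options nests the option keys.
migrated = {
    "platform": "calendar",
    "options": {"entity_id": "calendar.holidays", "event": "start"},
}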
@@ -98,6 +98,21 @@
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"current_humidity_changed": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"current_humidity_crossed_threshold": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"current_temperature_changed": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"current_temperature_crossed_threshold": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"hvac_mode_changed": {
|
||||
"trigger": "mdi:thermostat"
|
||||
},
|
||||
"started_cooling": {
|
||||
"trigger": "mdi:snowflake"
|
||||
},
|
||||
@@ -107,6 +122,18 @@
|
||||
"started_heating": {
|
||||
"trigger": "mdi:fire"
|
||||
},
|
||||
"target_humidity_changed": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"target_humidity_crossed_threshold": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"target_temperature_changed": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"target_temperature_crossed_threshold": {
|
||||
"trigger": "mdi:thermometer"
|
||||
},
|
||||
"turned_off": {
|
||||
"trigger": "mdi:power-off"
|
||||
},
|
||||
|
||||
@@ -192,12 +192,26 @@
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"number_or_entity": {
|
||||
"choices": {
|
||||
"entity": "Entity",
|
||||
"number": "Number"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
},
|
||||
"trigger_threshold_type": {
|
||||
"options": {
|
||||
"above": "Above a value",
|
||||
"below": "Below a value",
|
||||
"between": "In a range",
|
||||
"outside": "Outside a range"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -298,6 +312,92 @@
|
||||
},
|
||||
"title": "Climate",
|
||||
"triggers": {
|
||||
"current_humidity_changed": {
|
||||
"description": "Triggers after the humidity measured by one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the humidity is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the humidity is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current humidity changed"
|
||||
},
|
||||
"current_humidity_crossed_threshold": {
|
||||
"description": "Triggers after the humidity measured by one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current humidity crossed threshold"
|
||||
},
|
||||
"current_temperature_changed": {
|
||||
"description": "Triggers after the temperature measured by one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the temperature is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the temperature is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current temperature changed"
|
||||
},
|
||||
"current_temperature_crossed_threshold": {
|
||||
"description": "Triggers after the temperature measured by one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device current temperature crossed threshold"
|
||||
},
|
||||
"hvac_mode_changed": {
|
||||
"description": "Triggers after the mode of one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"hvac_mode": {
|
||||
"description": "The HVAC modes to trigger on.",
|
||||
"name": "Modes"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device mode changed"
|
||||
},
|
||||
"started_cooling": {
|
||||
"description": "Triggers after one or more climate-control devices start cooling.",
|
||||
"fields": {
|
||||
@@ -328,6 +428,78 @@
|
||||
},
|
||||
"name": "Climate-control device started heating"
|
||||
},
|
||||
"target_humidity_changed": {
|
||||
"description": "Triggers after the humidity setpoint of one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the target humidity is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the target humidity is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity changed"
|
||||
},
|
||||
"target_humidity_crossed_threshold": {
|
||||
"description": "Triggers after the humidity setpoint of one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target humidity crossed threshold"
|
||||
},
|
||||
"target_temperature_changed": {
|
||||
"description": "Triggers after the temperature setpoint of one or more climate-control devices changes.",
|
||||
"fields": {
|
||||
"above": {
|
||||
"description": "Trigger when the target temperature is above this value.",
|
||||
"name": "Above"
|
||||
},
|
||||
"below": {
|
||||
"description": "Trigger when the target temperature is below this value.",
|
||||
"name": "Below"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature changed"
|
||||
},
|
||||
"target_temperature_crossed_threshold": {
|
||||
"description": "Triggers after the temperature setpoint of one or more climate-control devices crosses a threshold.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
},
|
||||
"lower_limit": {
|
||||
"description": "Lower threshold limit.",
|
||||
"name": "Lower threshold"
|
||||
},
|
||||
"threshold_type": {
|
||||
"description": "Type of threshold crossing to trigger on.",
|
||||
"name": "Threshold type"
|
||||
},
|
||||
"upper_limit": {
|
||||
"description": "Upper threshold limit.",
|
||||
"name": "Upper threshold"
|
||||
}
|
||||
},
|
||||
"name": "Climate-control device target temperature crossed threshold"
|
||||
},
|
||||
"turned_off": {
|
||||
"description": "Triggers after one or more climate-control devices turn off.",
|
||||
"fields": {
|
||||
|
||||
@@ -1,24 +1,91 @@
|
||||
"""Provides triggers for climates."""
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_TEMPERATURE, CONF_OPTIONS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
|
||||
EntityTargetStateTriggerBase,
|
||||
Trigger,
|
||||
make_conditional_entity_state_trigger,
|
||||
make_entity_state_attribute_trigger,
|
||||
make_entity_state_trigger,
|
||||
TriggerConfig,
|
||||
make_entity_numerical_state_attribute_changed_trigger,
|
||||
make_entity_numerical_state_attribute_crossed_threshold_trigger,
|
||||
make_entity_target_state_attribute_trigger,
|
||||
make_entity_target_state_trigger,
|
||||
make_entity_transition_trigger,
|
||||
)
|
||||
|
||||
from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode
|
||||
from .const import (
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_HUMIDITY,
|
||||
ATTR_HVAC_ACTION,
|
||||
DOMAIN,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
|
||||
CONF_HVAC_MODE = "hvac_mode"
|
||||
|
||||
HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): {
|
||||
vol.Required(CONF_HVAC_MODE): vol.All(
|
||||
cv.ensure_list, vol.Length(min=1), [HVACMode]
|
||||
),
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class HVACModeChangedTrigger(EntityTargetStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domain = DOMAIN
|
||||
_schema = HVAC_MODE_CHANGED_TRIGGER_SCHEMA
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the state trigger."""
|
||||
super().__init__(hass, config)
|
||||
self._to_states = set(self._options[CONF_HVAC_MODE])
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"started_cooling": make_entity_state_attribute_trigger(
|
||||
"current_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
|
||||
DOMAIN, ATTR_CURRENT_HUMIDITY
|
||||
),
|
||||
"current_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
|
||||
DOMAIN, ATTR_CURRENT_HUMIDITY
|
||||
),
|
||||
"current_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
|
||||
DOMAIN, ATTR_CURRENT_TEMPERATURE
|
||||
),
|
||||
"current_temperature_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
|
||||
DOMAIN, ATTR_CURRENT_TEMPERATURE
|
||||
),
|
||||
"hvac_mode_changed": HVACModeChangedTrigger,
|
||||
"started_cooling": make_entity_target_state_attribute_trigger(
|
||||
DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING
|
||||
),
|
||||
"started_drying": make_entity_state_attribute_trigger(
|
||||
"started_drying": make_entity_target_state_attribute_trigger(
|
||||
DOMAIN, ATTR_HVAC_ACTION, HVACAction.DRYING
|
||||
),
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
|
||||
"turned_on": make_conditional_entity_state_trigger(
|
||||
"target_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
|
||||
DOMAIN, ATTR_HUMIDITY
|
||||
),
|
||||
"target_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
|
||||
DOMAIN, ATTR_HUMIDITY
|
||||
),
|
||||
"target_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
|
||||
DOMAIN, ATTR_TEMPERATURE
|
||||
),
|
||||
"target_temperature_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
|
||||
DOMAIN, ATTR_TEMPERATURE
|
||||
),
|
||||
"turned_off": make_entity_target_state_trigger(DOMAIN, HVACMode.OFF),
|
||||
"turned_on": make_entity_transition_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
HVACMode.OFF,
|
||||
@@ -32,7 +99,7 @@ TRIGGERS: dict[str, type[Trigger]] = {
|
||||
HVACMode.HEAT_COOL,
|
||||
},
|
||||
),
|
||||
"started_heating": make_entity_state_attribute_trigger(
|
||||
"started_heating": make_entity_target_state_attribute_trigger(
|
||||
DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
|
||||
),
|
||||
}
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
target: &trigger_climate_target
|
||||
entity:
|
||||
domain: climate
|
||||
fields:
|
||||
behavior:
|
||||
behavior: &trigger_behavior
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
@@ -14,8 +14,109 @@
|
||||
- last
|
||||
- any
|
||||
|
||||
.number_or_entity: &number_or_entity
|
||||
required: false
|
||||
selector:
|
||||
choose:
|
||||
choices:
|
||||
entity:
|
||||
selector:
|
||||
entity:
|
||||
filter:
|
||||
domain:
|
||||
- input_number
|
||||
- number
|
||||
- sensor
|
||||
number:
|
||||
selector:
|
||||
number:
|
||||
mode: box
|
||||
translation_key: number_or_entity
|
||||
|
||||
.trigger_threshold_type: &trigger_threshold_type
|
||||
required: true
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- above
|
||||
- below
|
||||
- between
|
||||
- outside
|
||||
translation_key: trigger_threshold_type
|
||||
|
||||
started_cooling: *trigger_common
|
||||
started_drying: *trigger_common
|
||||
started_heating: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
turned_on: *trigger_common
|
||||
|
||||
hvac_mode_changed:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
behavior: *trigger_behavior
|
||||
hvac_mode:
|
||||
context:
|
||||
filter_target: target
|
||||
required: true
|
||||
selector:
|
||||
state:
|
||||
hide_states:
|
||||
- unavailable
|
||||
- unknown
|
||||
multiple: true
|
||||
|
||||
current_humidity_changed:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
above: *number_or_entity
|
||||
below: *number_or_entity
|
||||
|
||||
current_humidity_crossed_threshold:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
behavior: *trigger_behavior
|
||||
threshold_type: *trigger_threshold_type
|
||||
lower_limit: *number_or_entity
|
||||
upper_limit: *number_or_entity
|
||||
|
||||
target_humidity_changed:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
above: *number_or_entity
|
||||
below: *number_or_entity
|
||||
|
||||
target_humidity_crossed_threshold:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
behavior: *trigger_behavior
|
||||
threshold_type: *trigger_threshold_type
|
||||
lower_limit: *number_or_entity
|
||||
upper_limit: *number_or_entity
|
||||
|
||||
current_temperature_changed:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
above: *number_or_entity
|
||||
below: *number_or_entity
|
||||
|
||||
current_temperature_crossed_threshold:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
behavior: *trigger_behavior
|
||||
threshold_type: *trigger_threshold_type
|
||||
lower_limit: *number_or_entity
|
||||
upper_limit: *number_or_entity
|
||||
|
||||
target_temperature_changed:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
above: *number_or_entity
|
||||
below: *number_or_entity
|
||||
|
||||
target_temperature_crossed_threshold:
|
||||
target: *trigger_climate_target
|
||||
fields:
|
||||
behavior: *trigger_behavior
|
||||
threshold_type: *trigger_threshold_type
|
||||
lower_limit: *number_or_entity
|
||||
upper_limit: *number_or_entity
|
||||
|
||||
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import CONF_SWING_SUPPORT, DOMAIN
|
||||
from .const import CONF_SEND_WAKEUP_PROMPT, CONF_SWING_SUPPORT, DOMAIN
|
||||
from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR]
|
||||
@@ -17,10 +17,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
|
||||
"""Set up Coolmaster from a config entry."""
|
||||
host = entry.data[CONF_HOST]
|
||||
port = entry.data[CONF_PORT]
|
||||
send_wakeup_prompt = entry.data.get(CONF_SEND_WAKEUP_PROMPT, False)
|
||||
if not entry.data.get(CONF_SWING_SUPPORT):
|
||||
coolmaster = CoolMasterNet(
|
||||
host,
|
||||
port,
|
||||
send_initial_line_feed=send_wakeup_prompt,
|
||||
)
|
||||
else:
|
||||
# Swing support adds an additional request per unit. The requests are
|
||||
@@ -29,6 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
|
||||
coolmaster = CoolMasterNet(
|
||||
host,
|
||||
port,
|
||||
send_initial_line_feed=send_wakeup_prompt,
|
||||
read_timeout=5,
|
||||
swing_support=True,
|
||||
)
|
||||
|
||||
@@ -12,7 +12,13 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
|
||||
from .const import CONF_SUPPORTED_MODES, CONF_SWING_SUPPORT, DEFAULT_PORT, DOMAIN
|
||||
from .const import (
|
||||
CONF_SEND_WAKEUP_PROMPT,
|
||||
CONF_SUPPORTED_MODES,
|
||||
CONF_SWING_SUPPORT,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
AVAILABLE_MODES = [
|
||||
HVACMode.OFF.value,
|
||||
@@ -25,17 +31,15 @@ AVAILABLE_MODES = [
|
||||
|
||||
MODES_SCHEMA = {vol.Required(mode, default=True): bool for mode in AVAILABLE_MODES}
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
**MODES_SCHEMA,
|
||||
vol.Required(CONF_SWING_SUPPORT, default=False): bool,
|
||||
}
|
||||
)
|
||||
DATA_SCHEMA = {
|
||||
vol.Required(CONF_HOST): str,
|
||||
**MODES_SCHEMA,
|
||||
vol.Required(CONF_SWING_SUPPORT, default=False): bool,
|
||||
}
|
||||
|
||||
|
||||
async def _validate_connection(host: str) -> bool:
|
||||
cool = CoolMasterNet(host, DEFAULT_PORT)
|
||||
async def _validate_connection(host: str, send_wakeup_prompt: bool) -> bool:
|
||||
cool = CoolMasterNet(host, DEFAULT_PORT, send_initial_line_feed=send_wakeup_prompt)
|
||||
units = await cool.status()
|
||||
return bool(units)
|
||||
|
||||
@@ -45,6 +49,14 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def _get_data_schema(self) -> vol.Schema:
|
||||
schema_dict = DATA_SCHEMA.copy()
|
||||
|
||||
if self.show_advanced_options:
|
||||
schema_dict[vol.Required(CONF_SEND_WAKEUP_PROMPT, default=False)] = bool
|
||||
|
||||
return vol.Schema(schema_dict)
|
||||
|
||||
@callback
|
||||
def _async_get_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
|
||||
supported_modes = [
|
||||
@@ -57,6 +69,7 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_PORT: DEFAULT_PORT,
|
||||
CONF_SUPPORTED_MODES: supported_modes,
|
||||
CONF_SWING_SUPPORT: data[CONF_SWING_SUPPORT],
|
||||
CONF_SEND_WAKEUP_PROMPT: data.get(CONF_SEND_WAKEUP_PROMPT, False),
|
||||
},
|
||||
)
|
||||
|
||||
@@ -64,15 +77,19 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
data_schema = self._get_data_schema()
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
|
||||
return self.async_show_form(step_id="user", data_schema=data_schema)
|
||||
|
||||
errors = {}
|
||||
|
||||
host = user_input[CONF_HOST]
|
||||
|
||||
try:
|
||||
result = await _validate_connection(host)
|
||||
result = await _validate_connection(
|
||||
host, user_input.get(CONF_SEND_WAKEUP_PROMPT, False)
|
||||
)
|
||||
if not result:
|
||||
errors["base"] = "no_units"
|
||||
except OSError:
|
||||
@@ -80,7 +97,7 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
step_id="user", data_schema=data_schema, errors=errors
|
||||
)
|
||||
|
||||
return self._async_get_entry(user_input)
|
||||
|
||||
@@ -6,5 +6,6 @@ DEFAULT_PORT = 10102
|
||||
|
||||
CONF_SUPPORTED_MODES = "supported_modes"
|
||||
CONF_SWING_SUPPORT = "swing_support"
|
||||
CONF_SEND_WAKEUP_PROMPT = "send_wakeup_prompt"
|
||||
MAX_RETRIES = 3
|
||||
BACKOFF_BASE_DELAY = 2
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pycoolmasternet_async"],
|
||||
"requirements": ["pycoolmasternet-async==0.2.2"]
|
||||
"requirements": ["pycoolmasternet-async==0.2.4"]
|
||||
}
|
||||
|
||||
@@ -14,10 +14,12 @@
|
||||
"heat_cool": "Support automatic heat/cool mode",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"off": "Can be turned off",
|
||||
"send_wakeup_prompt": "Send wakeup prompt",
|
||||
"swing_support": "Control swing mode"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your CoolMasterNet device."
|
||||
"host": "The hostname or IP address of your CoolMasterNet device.",
|
||||
"send_wakeup_prompt": "Send the coolmaster unit an empty commaand before issuing any actual command. This is required for serial models."
|
||||
},
|
||||
"description": "Set up your CoolMasterNet connection details."
|
||||
}
|
||||
|
||||
@@ -70,6 +70,7 @@ MEDIA_MODES = {
|
||||
"Favorites": "FAVORITES",
|
||||
"Internet Radio": "IRADIO",
|
||||
"USB/IPOD": "USB/IPOD",
|
||||
"USB": "USB",
|
||||
}
|
||||
|
||||
# Sub-modes of 'NET/USB'
|
||||
@@ -279,7 +280,7 @@ class DenonDevice(MediaPlayerEntity):
|
||||
def mute_volume(self, mute: bool) -> None:
|
||||
"""Mute (true) or unmute (false) media player."""
|
||||
mute_status = "ON" if mute else "OFF"
|
||||
self.telnet_command(f"MU{mute_status})")
|
||||
self.telnet_command(f"MU{mute_status}")
|
||||
|
||||
def media_play(self) -> None:
|
||||
"""Play media player."""
|
||||
|
||||
@@ -4,15 +4,15 @@ from homeassistant.const import STATE_HOME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
make_entity_from_state_trigger,
|
||||
make_entity_state_trigger,
|
||||
make_entity_origin_state_trigger,
|
||||
make_entity_target_state_trigger,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"entered_home": make_entity_state_trigger(DOMAIN, STATE_HOME),
|
||||
"left_home": make_entity_from_state_trigger(DOMAIN, from_state=STATE_HOME),
|
||||
"entered_home": make_entity_target_state_trigger(DOMAIN, STATE_HOME),
|
||||
"left_home": make_entity_origin_state_trigger(DOMAIN, from_state=STATE_HOME),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["async_upnp_client"],
|
||||
"requirements": ["async-upnp-client==0.46.0", "getmac==0.9.5"],
|
||||
"requirements": ["async-upnp-client==0.46.1", "getmac==0.9.5"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["async-upnp-client==0.46.0"],
|
||||
"requirements": ["async-upnp-client==0.46.1"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
|
||||
|
||||
@@ -110,7 +110,7 @@ async def async_register_dynalite_frontend(hass: HomeAssistant):
|
||||
frontend_url_path=DOMAIN,
|
||||
config_panel_domain=DOMAIN,
|
||||
webcomponent_name="dynalite-panel",
|
||||
module_url=f"{URL_BASE}/entrypoint-{build_id}.js",
|
||||
module_url=f"{URL_BASE}/entrypoint.{build_id}.js",
|
||||
embed_iframe=True,
|
||||
require_admin=True,
|
||||
)
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
"cpu_overheating": "CPU overheating",
|
||||
"none": "None",
|
||||
"pellets": "Pellets",
|
||||
"unkownn": "Unknown alarm"
|
||||
"unknown": "Unknown alarm"
|
||||
}
|
||||
},
|
||||
"convector_air_flow": {
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/enocean",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["enocean"],
|
||||
"requirements": ["enocean==0.50"],
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==43.0.0",
|
||||
"aioesphomeapi==43.6.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -8,8 +8,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .manager import async_replace_device
|
||||
|
||||
@@ -22,13 +21,6 @@ class ESPHomeRepair(RepairsFlow):
|
||||
self._data = data
|
||||
super().__init__()
|
||||
|
||||
@callback
|
||||
def _async_get_placeholders(self) -> dict[str, str]:
|
||||
issue_registry = ir.async_get(self.hass)
|
||||
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
|
||||
assert issue is not None
|
||||
return issue.translation_placeholders or {}
|
||||
|
||||
|
||||
class DeviceConflictRepair(ESPHomeRepair):
|
||||
"""Handler for an issue fixing device conflict."""
|
||||
@@ -58,7 +50,6 @@ class DeviceConflictRepair(ESPHomeRepair):
|
||||
return self.async_show_menu(
|
||||
step_id="init",
|
||||
menu_options=["migrate", "manual"],
|
||||
description_placeholders=self._async_get_placeholders(),
|
||||
)
|
||||
|
||||
async def async_step_migrate(
|
||||
@@ -69,7 +60,6 @@ class DeviceConflictRepair(ESPHomeRepair):
|
||||
return self.async_show_form(
|
||||
step_id="migrate",
|
||||
data_schema=vol.Schema({}),
|
||||
description_placeholders=self._async_get_placeholders(),
|
||||
)
|
||||
entry_id = self.entry_id
|
||||
await async_replace_device(self.hass, entry_id, self.stored_mac, self.mac)
|
||||
@@ -84,7 +74,6 @@ class DeviceConflictRepair(ESPHomeRepair):
|
||||
return self.async_show_form(
|
||||
step_id="manual",
|
||||
data_schema=vol.Schema({}),
|
||||
description_placeholders=self._async_get_placeholders(),
|
||||
)
|
||||
self.hass.config_entries.async_schedule_reload(self.entry_id)
|
||||
return self.async_create_entry(data={})
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
|
||||
"turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
|
||||
31
homeassistant/components/fluss/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""The Fluss+ integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import FlussDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.BUTTON]
|
||||
|
||||
|
||||
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FlussConfigEntry,
|
||||
) -> bool:
|
||||
"""Set up Fluss+ from a config entry."""
|
||||
coordinator = FlussDataUpdateCoordinator(hass, entry, entry.data[CONF_API_KEY])
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: FlussConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
40
homeassistant/components/fluss/button.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""Support for Fluss Devices."""
|
||||
|
||||
from homeassistant.components.button import ButtonEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import FlussApiClientError, FlussDataUpdateCoordinator
|
||||
from .entity import FlussEntity
|
||||
|
||||
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FlussConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Fluss Devices, filtering out any invalid payloads."""
|
||||
coordinator = entry.runtime_data
|
||||
devices = coordinator.data
|
||||
|
||||
async_add_entities(
|
||||
FlussButton(coordinator, device_id, device)
|
||||
for device_id, device in devices.items()
|
||||
)
|
||||
|
||||
|
||||
class FlussButton(FlussEntity, ButtonEntity):
|
||||
"""Representation of a Fluss button device."""
|
||||
|
||||
_attr_name = None
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
try:
|
||||
await self.coordinator.api.async_trigger_device(self.device_id)
|
||||
except FlussApiClientError as err:
|
||||
raise HomeAssistantError(f"Failed to trigger device: {err}") from err
|
||||
55
homeassistant/components/fluss/config_flow.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""Config flow for Fluss+ integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from fluss_api import (
|
||||
FlussApiClient,
|
||||
FlussApiClientAuthenticationError,
|
||||
FlussApiClientCommunicationError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): cv.string})
|
||||
|
||||
|
||||
class FlussConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Fluss+."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
api_key = user_input[CONF_API_KEY]
|
||||
self._async_abort_entries_match({CONF_API_KEY: api_key})
|
||||
client = FlussApiClient(
|
||||
user_input[CONF_API_KEY], session=async_get_clientsession(self.hass)
|
||||
)
|
||||
try:
|
||||
await client.async_get_devices()
|
||||
except FlussApiClientCommunicationError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except FlussApiClientAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
LOGGER.exception("Unexpected exception occurred")
|
||||
errors["base"] = "unknown"
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title="My Fluss+ Devices", data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
9
homeassistant/components/fluss/const.py
Normal file
@@ -0,0 +1,9 @@
|
||||
"""Constants for the Fluss+ integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
DOMAIN = "fluss"
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
UPDATE_INTERVAL = 60 # seconds
|
||||
UPDATE_INTERVAL_TIMEDELTA = timedelta(seconds=UPDATE_INTERVAL)
|
||||
50
homeassistant/components/fluss/coordinator.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""DataUpdateCoordinator for Fluss+ integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from fluss_api import (
|
||||
FlussApiClient,
|
||||
FlussApiClientAuthenticationError,
|
||||
FlussApiClientError,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import LOGGER, UPDATE_INTERVAL_TIMEDELTA
|
||||
|
||||
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
|
||||
|
||||
|
||||
class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Manages fetching Fluss device data on a schedule."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: FlussConfigEntry, api_key: str
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self.api = FlussApiClient(api_key, session=async_get_clientsession(hass))
|
||||
super().__init__(
|
||||
hass,
|
||||
LOGGER,
|
||||
name=f"Fluss+ ({slugify(api_key[:8])})",
|
||||
config_entry=config_entry,
|
||||
update_interval=UPDATE_INTERVAL_TIMEDELTA,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
|
||||
"""Fetch data from the Fluss API and return as a dictionary keyed by deviceId."""
|
||||
try:
|
||||
devices = await self.api.async_get_devices()
|
||||
except FlussApiClientAuthenticationError as err:
|
||||
raise ConfigEntryError(f"Authentication failed: {err}") from err
|
||||
except FlussApiClientError as err:
|
||||
raise UpdateFailed(f"Error fetching Fluss devices: {err}") from err
|
||||
|
||||
return {device["deviceId"]: device for device in devices.get("devices", [])}
|
||||
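A small sketch of the re-keying done in _async_update_data above, using a made-up API payload (the devices/deviceId/deviceName field names come from this diff; everything else is illustrative):

# Hypothetical payload shaped like the Fluss API response handled above.
devices = {"devices": [{"deviceId": "abc123", "deviceName": "Front Gate"}]}
by_id = {device["deviceId"]: device for device in devices.get("devices", [])}
# by_id == {"abc123": {"deviceId": "abc123", "deviceName": "Front Gate"}}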
39
homeassistant/components/fluss/entity.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""Base entities for the Fluss+ integration."""
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import FlussDataUpdateCoordinator
|
||||
|
||||
|
||||
class FlussEntity(CoordinatorEntity[FlussDataUpdateCoordinator]):
|
||||
"""Base class for Fluss entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: FlussDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
device: dict,
|
||||
) -> None:
|
||||
"""Initialize the entity with a device ID and device data."""
|
||||
super().__init__(coordinator)
|
||||
self.device_id = device_id
|
||||
self._attr_unique_id = device_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={("fluss", device_id)},
|
||||
name=device.get("deviceName"),
|
||||
manufacturer="Fluss",
|
||||
model="Fluss+ Device",
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the device is available."""
|
||||
return super().available and self.device_id in self.coordinator.data
|
||||
|
||||
@property
|
||||
def device(self) -> dict:
|
||||
"""Return the stored device data."""
|
||||
return self.coordinator.data[self.device_id]
|
||||
11
homeassistant/components/fluss/manifest.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"domain": "fluss",
|
||||
"name": "Fluss+",
|
||||
"codeowners": ["@fluss"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/fluss",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["fluss-api"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["fluss-api==0.1.9.20"]
|
||||
}
|
||||
69
homeassistant/components/fluss/quality_scale.yaml
Normal file
@@ -0,0 +1,69 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No actions present
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
# Gold
|
||||
entity-translations: done
|
||||
entity-device-class: done
|
||||
devices: done
|
||||
entity-category: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: |
|
||||
Not needed
|
||||
discovery: todo
|
||||
stale-devices: todo
|
||||
diagnostics: todo
|
||||
exception-translations: todo
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
No icons used
|
||||
reconfiguration-flow: todo
|
||||
dynamic-devices: todo
|
||||
discovery-update-info: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
No issues to repair
|
||||
docs-use-cases: done
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: done
|
||||
docs-data-update: todo
|
||||
docs-known-limitations: done
|
||||
docs-troubleshooting: todo
|
||||
docs-examples: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
23
homeassistant/components/fluss/strings.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "The API key found in the profile page of the Fluss+ app."
|
||||
},
|
||||
"description": "Your Fluss API key, available in the profile page of the Fluss+ app"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,7 @@
"config_flow": true,
"dependencies": ["ffmpeg"],
"documentation": "https://www.home-assistant.io/integrations/freebox",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["freebox_api"],
"requirements": ["freebox-api==1.2.2"],

@@ -6,7 +6,7 @@ from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from pyfritzhome import Fritzhome, FritzhomeDevice, LoginError
|
||||
from pyfritzhome.devicetypes import FritzhomeTemplate
|
||||
from pyfritzhome.devicetypes import FritzhomeTemplate, FritzhomeTrigger
|
||||
from requests.exceptions import ConnectionError as RequestConnectionError, HTTPError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -27,6 +27,7 @@ class FritzboxCoordinatorData:
|
||||
|
||||
devices: dict[str, FritzhomeDevice]
|
||||
templates: dict[str, FritzhomeTemplate]
|
||||
triggers: dict[str, FritzhomeTrigger]
|
||||
supported_color_properties: dict[str, tuple[dict, list]]
|
||||
|
||||
|
||||
@@ -37,6 +38,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
configuration_url: str
|
||||
fritz: Fritzhome
|
||||
has_templates: bool
|
||||
has_triggers: bool
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: FritzboxConfigEntry) -> None:
|
||||
"""Initialize the Fritzbox Smarthome device coordinator."""
|
||||
@@ -50,8 +52,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
|
||||
self.new_devices: set[str] = set()
|
||||
self.new_templates: set[str] = set()
|
||||
self.new_triggers: set[str] = set()
|
||||
|
||||
self.data = FritzboxCoordinatorData({}, {}, {})
|
||||
self.data = FritzboxCoordinatorData({}, {}, {}, {})
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
@@ -74,6 +77,11 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
)
|
||||
LOGGER.debug("enable smarthome templates: %s", self.has_templates)
|
||||
|
||||
self.has_triggers = await self.hass.async_add_executor_job(
|
||||
self.fritz.has_triggers
|
||||
)
|
||||
LOGGER.debug("enable smarthome triggers: %s", self.has_triggers)
|
||||
|
||||
self.configuration_url = self.fritz.get_prefixed_host()
|
||||
|
||||
await self.async_config_entry_first_refresh()
|
||||
@@ -92,7 +100,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
|
||||
available_main_ains = [
|
||||
ain
|
||||
for ain, dev in data.devices.items() | data.templates.items()
|
||||
for ain, dev in (data.devices | data.templates | data.triggers).items()
|
||||
if dev.device_and_unit_id[1] is None
|
||||
]
|
||||
device_reg = dr.async_get(self.hass)
|
||||
@@ -112,6 +120,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
self.fritz.update_devices(ignore_removed=False)
|
||||
if self.has_templates:
|
||||
self.fritz.update_templates(ignore_removed=False)
|
||||
if self.has_triggers:
|
||||
self.fritz.update_triggers(ignore_removed=False)
|
||||
|
||||
except RequestConnectionError as ex:
|
||||
raise UpdateFailed from ex
|
||||
except HTTPError:
|
||||
@@ -123,6 +134,8 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
self.fritz.update_devices(ignore_removed=False)
|
||||
if self.has_templates:
|
||||
self.fritz.update_templates(ignore_removed=False)
|
||||
if self.has_triggers:
|
||||
self.fritz.update_triggers(ignore_removed=False)
|
||||
|
||||
devices = self.fritz.get_devices()
|
||||
device_data = {}
|
||||
@@ -156,12 +169,20 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
for template in templates:
|
||||
template_data[template.ain] = template
|
||||
|
||||
trigger_data = {}
|
||||
if self.has_triggers:
|
||||
triggers = self.fritz.get_triggers()
|
||||
for trigger in triggers:
|
||||
trigger_data[trigger.ain] = trigger
|
||||
|
||||
self.new_devices = device_data.keys() - self.data.devices.keys()
|
||||
self.new_templates = template_data.keys() - self.data.templates.keys()
|
||||
self.new_triggers = trigger_data.keys() - self.data.triggers.keys()
|
||||
|
||||
return FritzboxCoordinatorData(
|
||||
devices=device_data,
|
||||
templates=template_data,
|
||||
triggers=trigger_data,
|
||||
supported_color_properties=supported_color_properties,
|
||||
)
|
||||
|
||||
@@ -193,6 +214,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
if (
|
||||
self.data.devices.keys() - new_data.devices.keys()
|
||||
or self.data.templates.keys() - new_data.templates.keys()
|
||||
or self.data.triggers.keys() - new_data.triggers.keys()
|
||||
):
|
||||
self.cleanup_removed_devices(new_data)
|
||||
|
||||
|
||||
@@ -4,14 +4,17 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyfritzhome.devicetypes import FritzhomeTrigger
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import FritzboxConfigEntry
|
||||
from .entity import FritzBoxDeviceEntity
|
||||
from .entity import FritzBoxDeviceEntity, FritzBoxEntity
|
||||
|
||||
# Coordinator handles data updates, so we can allow unlimited parallel updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -26,21 +29,27 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
@callback
|
||||
def _add_entities(devices: set[str] | None = None) -> None:
|
||||
"""Add devices."""
|
||||
def _add_entities(
|
||||
devices: set[str] | None = None, triggers: set[str] | None = None
|
||||
) -> None:
|
||||
"""Add devices and triggers."""
|
||||
if devices is None:
|
||||
devices = coordinator.new_devices
|
||||
if not devices:
|
||||
if triggers is None:
|
||||
triggers = coordinator.new_triggers
|
||||
if not devices and not triggers:
|
||||
return
|
||||
async_add_entities(
|
||||
entities = [
|
||||
FritzboxSwitch(coordinator, ain)
|
||||
for ain in devices
|
||||
if coordinator.data.devices[ain].has_switch
|
||||
)
|
||||
] + [FritzboxTrigger(coordinator, ain) for ain in triggers]
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_add_entities))
|
||||
|
||||
_add_entities(set(coordinator.data.devices))
|
||||
_add_entities(set(coordinator.data.devices), set(coordinator.data.triggers))
|
||||
|
||||
|
||||
class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
|
||||
@@ -70,3 +79,42 @@ class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="manual_switching_disabled",
|
||||
)
|
||||
|
||||
|
||||
class FritzboxTrigger(FritzBoxEntity, SwitchEntity):
|
||||
"""The switch class for FRITZ!SmartHome triggers."""
|
||||
|
||||
@property
|
||||
def data(self) -> FritzhomeTrigger:
|
||||
"""Return the trigger data entity."""
|
||||
return self.coordinator.data.triggers[self.ain]
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return device specific attributes."""
|
||||
return DeviceInfo(
|
||||
name=self.data.name,
|
||||
identifiers={(DOMAIN, self.ain)},
|
||||
configuration_url=self.coordinator.configuration_url,
|
||||
manufacturer="FRITZ!",
|
||||
model="SmartHome Routine",
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the trigger is active."""
|
||||
return self.data.active # type: ignore [no-any-return]
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Activate the trigger."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.fritz.set_trigger_active, self.ain
|
||||
)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Deactivate the trigger."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.fritz.set_trigger_inactive, self.ain
|
||||
)
|
||||
await self.coordinator.async_refresh()
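
The new FritzboxTrigger entity above exposes FRITZ!SmartHome routines as regular switch entities, so they can be toggled like any other switch. A minimal usage sketch, assuming a running hass instance and a hypothetical entity ID switch.my_routine (real IDs depend on the routine's name):

from homeassistant.core import HomeAssistant


async def activate_routine(hass: HomeAssistant) -> None:
    """Turn on a FRITZ!SmartHome routine exposed by the FritzboxTrigger switch."""
    # "switch.my_routine" is a placeholder entity ID.
    await hass.services.async_call(
        "switch", "turn_on", {"entity_id": "switch.my_routine"}, blocking=True
    )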
|
||||
|
||||
@@ -25,7 +25,7 @@ from homeassistant.const import (
|
||||
EVENT_PANELS_UPDATED,
|
||||
EVENT_THEMES_UPDATED,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, async_get_hass, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
from homeassistant.helpers.icon import async_get_icons
|
||||
@@ -41,6 +41,7 @@ from .storage import async_setup_frontend_storage
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "frontend"
|
||||
CONF_NAME_DARK = "name_dark"
|
||||
CONF_THEMES = "themes"
|
||||
CONF_THEMES_MODES = "modes"
|
||||
CONF_THEMES_LIGHT = "light"
|
||||
@@ -526,6 +527,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def _validate_selected_theme(theme: str) -> str:
|
||||
"""Validate that a user selected theme is a valid theme."""
|
||||
if theme in (DEFAULT_THEME, VALUE_NO_THEME):
|
||||
return theme
|
||||
hass = async_get_hass()
|
||||
if theme not in hass.data[DATA_THEMES]:
|
||||
raise vol.Invalid(f"Theme {theme} not found")
|
||||
return theme
|
||||
|
||||
|
||||
async def _async_setup_themes(
|
||||
hass: HomeAssistant, themes: dict[str, Any] | None
|
||||
) -> None:
|
||||
@@ -569,27 +580,32 @@ async def _async_setup_themes(
|
||||
@callback
|
||||
def set_theme(call: ServiceCall) -> None:
|
||||
"""Set backend-preferred theme."""
|
||||
name = call.data[CONF_NAME]
|
||||
mode = call.data.get("mode", "light")
|
||||
|
||||
if (
|
||||
name not in (DEFAULT_THEME, VALUE_NO_THEME)
|
||||
and name not in hass.data[DATA_THEMES]
|
||||
):
|
||||
_LOGGER.warning("Theme %s not found", name)
|
||||
return
|
||||
def _update_hass_theme(theme: str, light: bool) -> None:
|
||||
theme_key = DATA_DEFAULT_THEME if light else DATA_DEFAULT_DARK_THEME
|
||||
if theme == VALUE_NO_THEME:
|
||||
to_set = DEFAULT_THEME if light else None
|
||||
else:
|
||||
_LOGGER.info(
|
||||
"Theme %s set as default %s theme",
|
||||
theme,
|
||||
"light" if light else "dark",
|
||||
)
|
||||
to_set = theme
|
||||
hass.data[theme_key] = to_set
|
||||
|
||||
light_mode = mode == "light"
|
||||
|
||||
theme_key = DATA_DEFAULT_THEME if light_mode else DATA_DEFAULT_DARK_THEME
|
||||
|
||||
if name == VALUE_NO_THEME:
|
||||
to_set = DEFAULT_THEME if light_mode else None
|
||||
name = call.data.get(CONF_NAME)
|
||||
if name is not None and CONF_MODE in call.data:
|
||||
mode = call.data.get("mode", "light")
|
||||
light_mode = mode == "light"
|
||||
_update_hass_theme(name, light_mode)
|
||||
else:
|
||||
_LOGGER.info("Theme %s set as default %s theme", name, mode)
|
||||
to_set = name
|
||||
name_dark = call.data.get(CONF_NAME_DARK)
|
||||
if name:
|
||||
_update_hass_theme(name, True)
|
||||
if name_dark:
|
||||
_update_hass_theme(name_dark, False)
|
||||
|
||||
hass.data[theme_key] = to_set
|
||||
store.async_delay_save(
|
||||
lambda: {
|
||||
DATA_DEFAULT_THEME: hass.data[DATA_DEFAULT_THEME],
|
||||
@@ -624,11 +640,13 @@ async def _async_setup_themes(
|
||||
DOMAIN,
|
||||
SERVICE_SET_THEME,
|
||||
set_theme,
|
||||
vol.Schema(
|
||||
vol.All(
|
||||
{
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_MODE): vol.Any("dark", "light"),
|
||||
}
|
||||
vol.Optional(CONF_NAME): _validate_selected_theme,
|
||||
vol.Exclusive(CONF_NAME_DARK, "dark_modes"): _validate_selected_theme,
|
||||
vol.Exclusive(CONF_MODE, "dark_modes"): vol.Any("dark", "light"),
|
||||
},
|
||||
cv.has_at_least_one_key(CONF_NAME, CONF_NAME_DARK),
|
||||
),
|
||||
)
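
With the reworked schema above, frontend.set_theme takes an optional name (validated against the loaded themes), an optional name_dark that is mutually exclusive with mode, and requires at least one of name or name_dark. A minimal sketch of calling the service with separate light and dark defaults; the theme names are placeholders and must exist among the loaded themes:

from homeassistant.core import HomeAssistant


async def apply_default_themes(hass: HomeAssistant) -> None:
    """Set independent light and dark default themes in one set_theme call."""
    # "nord" and "nord-dark" are placeholder theme names.
    await hass.services.async_call(
        "frontend",
        "set_theme",
        {"name": "nord", "name_dark": "nord-dark"},
        blocking=True,
    )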
|
||||
|
||||
|
||||
@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251203.2"]
"requirements": ["home-assistant-frontend==20251203.3"]
}

@@ -3,17 +3,15 @@
|
||||
set_theme:
|
||||
fields:
|
||||
name:
|
||||
required: true
|
||||
required: false
|
||||
example: "default"
|
||||
selector:
|
||||
theme:
|
||||
include_default: true
|
||||
mode:
|
||||
default: "light"
|
||||
name_dark:
|
||||
required: false
|
||||
example: "default"
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- "dark"
|
||||
- "light"
|
||||
translation_key: mode
|
||||
theme:
|
||||
include_default: true
|
||||
reload_themes:
|
||||
|
||||
@@ -7,32 +7,24 @@
|
||||
"name": "Winter mode"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"mode": {
|
||||
"options": {
|
||||
"dark": "Dark",
|
||||
"light": "Light"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"reload_themes": {
|
||||
"description": "Reloads themes from the YAML-configuration.",
|
||||
"name": "Reload themes"
|
||||
},
|
||||
"set_theme": {
|
||||
"description": "Sets the default theme Home Assistant uses. Can be overridden by a user.",
|
||||
"description": "Sets the theme Home Assistant uses. Can be overridden by a user.",
|
||||
"fields": {
|
||||
"mode": {
|
||||
"description": "Theme mode.",
|
||||
"name": "Mode"
|
||||
},
|
||||
"name": {
|
||||
"description": "Name of a theme.",
|
||||
"description": "Name of the theme that is used by default.",
|
||||
"name": "Theme"
|
||||
},
|
||||
"name_dark": {
|
||||
"description": "Alternative dark-mode theme that is used by default.",
|
||||
"name": "Dark theme override"
|
||||
}
|
||||
},
|
||||
"name": "Set the default theme"
|
||||
"name": "Set theme"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,15 +2,23 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.stream import (
|
||||
CONF_RTSP_TRANSPORT,
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.const import CONF_AUTHENTICATION, CONF_VERIFY_SSL, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .const import CONF_FRAMERATE, CONF_LIMIT_REFETCH_TO_URL_CHANGE, SECTION_ADVANCED
|
||||
|
||||
DOMAIN = "generic"
|
||||
PLATFORMS = [Platform.CAMERA]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
@@ -47,3 +55,38 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Migrate entry."""
|
||||
_LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)
|
||||
|
||||
if entry.version > 2:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if entry.version == 1:
|
||||
# Migrate to advanced section
|
||||
new_options = {**entry.options}
|
||||
advanced = new_options[SECTION_ADVANCED] = {
|
||||
CONF_FRAMERATE: new_options.pop(CONF_FRAMERATE),
|
||||
CONF_VERIFY_SSL: new_options.pop(CONF_VERIFY_SSL),
|
||||
}
|
||||
|
||||
# migrate optional fields
|
||||
for key in (
|
||||
CONF_RTSP_TRANSPORT,
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
|
||||
CONF_AUTHENTICATION,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE,
|
||||
):
|
||||
if key in new_options:
|
||||
advanced[key] = new_options.pop(key)
|
||||
|
||||
hass.config_entries.async_update_entry(entry, options=new_options, version=2)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migration to version %s:%s successful", entry.version, entry.minor_version
|
||||
)
|
||||
|
||||
return True
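
A sketch of what the version 1 to version 2 migration above does to a config entry's options; the concrete values are illustrative only, while the key names mirror the constants used in the integration:

# Illustrative options payloads for the v1 -> v2 migration above.
# The URL and values are made up.
options_v1 = {
    "still_image_url": "http://camera.local/snapshot.jpg",
    "framerate": 2,
    "verify_ssl": True,
    "rtsp_transport": "tcp",
}

# After async_migrate_entry runs, framerate/verify_ssl plus any of the optional
# rtsp_transport, authentication, limit_refetch_to_url_change and
# use_wallclock_as_timestamps keys are nested under the "advanced" section.
options_v2 = {
    "still_image_url": "http://camera.local/snapshot.jpg",
    "advanced": {
        "framerate": 2,
        "verify_ssl": True,
        "rtsp_transport": "tcp",
    },
}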
|
||||
|
||||
@@ -41,6 +41,7 @@ from .const import (
|
||||
CONF_STILL_IMAGE_URL,
|
||||
CONF_STREAM_SOURCE,
|
||||
GET_IMAGE_TIMEOUT,
|
||||
SECTION_ADVANCED,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -62,9 +63,11 @@ def generate_auth(device_info: Mapping[str, Any]) -> httpx.Auth | None:
|
||||
"""Generate httpx.Auth object from credentials."""
|
||||
username: str | None = device_info.get(CONF_USERNAME)
|
||||
password: str | None = device_info.get(CONF_PASSWORD)
|
||||
authentication = device_info.get(CONF_AUTHENTICATION)
|
||||
if username and password:
|
||||
if authentication == HTTP_DIGEST_AUTHENTICATION:
|
||||
if (
|
||||
device_info[SECTION_ADVANCED].get(CONF_AUTHENTICATION)
|
||||
== HTTP_DIGEST_AUTHENTICATION
|
||||
):
|
||||
return httpx.DigestAuth(username=username, password=password)
|
||||
return httpx.BasicAuth(username=username, password=password)
|
||||
return None
|
||||
@@ -99,14 +102,16 @@ class GenericCamera(Camera):
|
||||
if self._stream_source:
|
||||
self._stream_source = Template(self._stream_source, hass)
|
||||
self._attr_supported_features = CameraEntityFeature.STREAM
|
||||
self._limit_refetch = device_info.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False)
|
||||
self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE]
|
||||
self._limit_refetch = device_info[SECTION_ADVANCED].get(
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE, False
|
||||
)
|
||||
self._attr_frame_interval = 1 / device_info[SECTION_ADVANCED][CONF_FRAMERATE]
|
||||
self.content_type = device_info[CONF_CONTENT_TYPE]
|
||||
self.verify_ssl = device_info[CONF_VERIFY_SSL]
|
||||
if device_info.get(CONF_RTSP_TRANSPORT):
|
||||
self.stream_options[CONF_RTSP_TRANSPORT] = device_info[CONF_RTSP_TRANSPORT]
|
||||
self.verify_ssl = device_info[SECTION_ADVANCED][CONF_VERIFY_SSL]
|
||||
if rtsp_transport := device_info[SECTION_ADVANCED].get(CONF_RTSP_TRANSPORT):
|
||||
self.stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport
|
||||
self._auth = generate_auth(device_info)
|
||||
if device_info.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
if device_info[SECTION_ADVANCED].get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
self.stream_options[CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True
|
||||
|
||||
self._last_url = None
|
||||
|
||||
@@ -50,10 +50,18 @@ from homeassistant.const import (
|
||||
HTTP_DIGEST_AUTHENTICATION,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers import config_validation as cv, template as template_helper
|
||||
from homeassistant.helpers.entity_platform import PlatformData
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.network import get_url
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .camera import GenericCamera, generate_auth
|
||||
@@ -67,17 +75,20 @@ from .const import (
|
||||
DEFAULT_NAME,
|
||||
DOMAIN,
|
||||
GET_IMAGE_TIMEOUT,
|
||||
SECTION_ADVANCED,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_DATA = {
|
||||
CONF_NAME: DEFAULT_NAME,
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
CONF_FRAMERATE: 2,
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_RTSP_TRANSPORT: "tcp",
|
||||
SECTION_ADVANCED: {
|
||||
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
|
||||
CONF_FRAMERATE: 2,
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_RTSP_TRANSPORT: "tcp",
|
||||
},
|
||||
}
|
||||
|
||||
SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml", "webp"}
|
||||
@@ -94,58 +105,47 @@ class InvalidStreamException(HomeAssistantError):
|
||||
|
||||
|
||||
def build_schema(
|
||||
user_input: Mapping[str, Any],
|
||||
is_options_flow: bool = False,
|
||||
show_advanced_options: bool = False,
|
||||
) -> vol.Schema:
|
||||
"""Create schema for camera config setup."""
|
||||
rtsp_options = [
|
||||
SelectOptionDict(
|
||||
value=value,
|
||||
label=name,
|
||||
)
|
||||
for value, name in RTSP_TRANSPORTS.items()
|
||||
]
|
||||
|
||||
advanced_section = {
|
||||
vol.Required(CONF_FRAMERATE): vol.All(
|
||||
vol.Range(min=0, min_included=False), cv.positive_float
|
||||
),
|
||||
vol.Required(CONF_VERIFY_SSL): bool,
|
||||
vol.Optional(CONF_RTSP_TRANSPORT): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=rtsp_options,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_AUTHENTICATION): vol.In(
|
||||
[HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
|
||||
),
|
||||
}
|
||||
spec = {
|
||||
vol.Optional(
|
||||
CONF_STILL_IMAGE_URL,
|
||||
description={"suggested_value": user_input.get(CONF_STILL_IMAGE_URL, "")},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_STREAM_SOURCE,
|
||||
description={"suggested_value": user_input.get(CONF_STREAM_SOURCE, "")},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_RTSP_TRANSPORT,
|
||||
description={"suggested_value": user_input.get(CONF_RTSP_TRANSPORT)},
|
||||
): vol.In(RTSP_TRANSPORTS),
|
||||
vol.Optional(
|
||||
CONF_AUTHENTICATION,
|
||||
description={"suggested_value": user_input.get(CONF_AUTHENTICATION)},
|
||||
): vol.In([HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]),
|
||||
vol.Optional(
|
||||
CONF_USERNAME,
|
||||
description={"suggested_value": user_input.get(CONF_USERNAME, "")},
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD,
|
||||
description={"suggested_value": user_input.get(CONF_PASSWORD, "")},
|
||||
): str,
|
||||
vol.Required(
|
||||
CONF_FRAMERATE,
|
||||
description={"suggested_value": user_input.get(CONF_FRAMERATE, 2)},
|
||||
): vol.All(vol.Range(min=0, min_included=False), cv.positive_float),
|
||||
vol.Required(
|
||||
CONF_VERIFY_SSL, default=user_input.get(CONF_VERIFY_SSL, True)
|
||||
): bool,
|
||||
vol.Optional(CONF_STREAM_SOURCE): str,
|
||||
vol.Optional(CONF_STILL_IMAGE_URL): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
vol.Required(SECTION_ADVANCED): section(
|
||||
vol.Schema(advanced_section), {"collapsed": True}
|
||||
),
|
||||
}
|
||||
if is_options_flow:
|
||||
spec[
|
||||
vol.Required(
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE,
|
||||
default=user_input.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False),
|
||||
)
|
||||
] = bool
|
||||
advanced_section[vol.Optional(CONF_LIMIT_REFETCH_TO_URL_CHANGE)] = bool
|
||||
if show_advanced_options:
|
||||
spec[
|
||||
vol.Required(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
|
||||
default=user_input.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False),
|
||||
)
|
||||
] = bool
|
||||
advanced_section[vol.Optional(CONF_USE_WALLCLOCK_AS_TIMESTAMPS)] = bool
|
||||
|
||||
return vol.Schema(spec)
|
||||
|
||||
|
||||
@@ -187,7 +187,7 @@ async def async_test_still(
|
||||
return {CONF_STILL_IMAGE_URL: "malformed_url"}, None
|
||||
if not yarl_url.is_absolute():
|
||||
return {CONF_STILL_IMAGE_URL: "relative_url"}, None
|
||||
verify_ssl = info[CONF_VERIFY_SSL]
|
||||
verify_ssl = info[SECTION_ADVANCED][CONF_VERIFY_SSL]
|
||||
auth = generate_auth(info)
|
||||
try:
|
||||
async_client = get_async_client(hass, verify_ssl=verify_ssl)
|
||||
@@ -268,9 +268,9 @@ async def async_test_and_preview_stream(
|
||||
_LOGGER.warning("Problem rendering template %s: %s", stream_source, err)
|
||||
raise InvalidStreamException("template_error") from err
|
||||
stream_options: dict[str, str | bool | float] = {}
|
||||
if rtsp_transport := info.get(CONF_RTSP_TRANSPORT):
|
||||
if rtsp_transport := info[SECTION_ADVANCED].get(CONF_RTSP_TRANSPORT):
|
||||
stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport
|
||||
if info.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
if info[SECTION_ADVANCED].get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
|
||||
stream_options[CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True
|
||||
|
||||
try:
|
||||
@@ -326,7 +326,7 @@ def register_still_preview(hass: HomeAssistant) -> None:
|
||||
class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for generic IP camera."""
|
||||
|
||||
VERSION = 1
|
||||
VERSION = 2
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize Generic ConfigFlow."""
|
||||
@@ -381,7 +381,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
user_input = DEFAULT_DATA.copy()
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=build_schema(user_input),
|
||||
data_schema=self.add_suggested_values_to_schema(build_schema(), user_input),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -449,13 +449,19 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
self.preview_stream = None
|
||||
if not errors:
|
||||
data = {
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
|
||||
),
|
||||
**user_input,
|
||||
CONF_CONTENT_TYPE: still_format
|
||||
or self.config_entry.options.get(CONF_CONTENT_TYPE),
|
||||
}
|
||||
if (
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS
|
||||
not in user_input[SECTION_ADVANCED]
|
||||
):
|
||||
data[SECTION_ADVANCED][CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = (
|
||||
self.config_entry.options[SECTION_ADVANCED].get(
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
|
||||
)
|
||||
)
|
||||
self.user_input = data
|
||||
# temporary preview for user to check the image
|
||||
self.preview_image_settings = data
|
||||
@@ -464,10 +470,12 @@ class GenericOptionsFlowHandler(OptionsFlow):
|
||||
user_input = self.user_input
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=build_schema(
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
build_schema(
|
||||
True,
|
||||
self.show_advanced_options,
|
||||
),
|
||||
user_input or self.config_entry.options,
|
||||
True,
|
||||
self.show_advanced_options,
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
@@ -583,7 +591,8 @@ async def ws_start_preview(
|
||||
_LOGGER.debug("Got preview still URL: %s", ha_still_url)
|
||||
|
||||
if ha_stream := flow.preview_stream:
|
||||
ha_stream_url = ha_stream.endpoint_url(HLS_PROVIDER)
|
||||
# HLS player needs an absolute URL as base for constructing child playlist URLs
|
||||
ha_stream_url = f"{get_url(hass)}{ha_stream.endpoint_url(HLS_PROVIDER)}"
|
||||
_LOGGER.debug("Got preview stream URL: %s", ha_stream_url)
|
||||
|
||||
connection.send_message(
|
||||
|
||||
@@ -9,3 +9,4 @@ CONF_STILL_IMAGE_URL = "still_image_url"
CONF_STREAM_SOURCE = "stream_source"
CONF_FRAMERATE = "framerate"
GET_IMAGE_TIMEOUT = 10
SECTION_ADVANCED = "advanced"

@@ -26,17 +26,24 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"authentication": "Authentication",
|
||||
"framerate": "Frame rate (Hz)",
|
||||
"limit_refetch_to_url_change": "Limit refetch to URL change",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"rtsp_transport": "RTSP transport protocol",
|
||||
"still_image_url": "Still image URL (e.g. http://...)",
|
||||
"stream_source": "Stream source URL (e.g. rtsp://...)",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"description": "Enter the settings to connect to the camera."
|
||||
"sections": {
|
||||
"advanced": {
|
||||
"data": {
|
||||
"authentication": "Authentication",
|
||||
"framerate": "Frame rate (Hz)",
|
||||
"limit_refetch_to_url_change": "Limit refetch to URL change",
|
||||
"rtsp_transport": "RTSP transport protocol",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"description": "Advanced settings are only needed for special cases. Leave them unchanged unless you know what you are doing.",
|
||||
"name": "Advanced settings"
|
||||
}
|
||||
}
|
||||
},
|
||||
"user_confirm": {
|
||||
"data": {
|
||||
@@ -70,19 +77,27 @@
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"authentication": "[%key:component::generic::config::step::user::data::authentication%]",
|
||||
"framerate": "[%key:component::generic::config::step::user::data::framerate%]",
|
||||
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::data::limit_refetch_to_url_change%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"rtsp_transport": "[%key:component::generic::config::step::user::data::rtsp_transport%]",
|
||||
"still_image_url": "[%key:component::generic::config::step::user::data::still_image_url%]",
|
||||
"stream_source": "[%key:component::generic::config::step::user::data::stream_source%]",
|
||||
"use_wallclock_as_timestamps": "Use wallclock as timestamps",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
|
||||
"sections": {
|
||||
"advanced": {
|
||||
"data": {
|
||||
"authentication": "[%key:component::generic::config::step::user::sections::advanced::data::authentication%]",
|
||||
"framerate": "[%key:component::generic::config::step::user::sections::advanced::data::framerate%]",
|
||||
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::sections::advanced::data::limit_refetch_to_url_change%]",
|
||||
"rtsp_transport": "[%key:component::generic::config::step::user::sections::advanced::data::rtsp_transport%]",
|
||||
"use_wallclock_as_timestamps": "Use wallclock as timestamps",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
|
||||
},
|
||||
"description": "[%key:component::generic::config::step::user::sections::advanced::description%]",
|
||||
"name": "[%key:component::generic::config::step::user::sections::advanced::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"user_confirm": {
|
||||
|
||||
@@ -8,4 +8,4 @@ HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
# When changing this version, also update the corresponding SHA hash (_GO2RTC_SHA)
# in script/hassfest/docker.py.
RECOMMENDED_VERSION = "1.9.12"
RECOMMENDED_VERSION = "1.9.13"

@@ -8,6 +8,6 @@
"integration_type": "system",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["go2rtc-client==0.3.0"],
"requirements": ["go2rtc-client==0.4.0"],
"single_config_entry": true
}

@@ -12,6 +12,7 @@
"homekit": {
"models": ["iSmartGate"]
},
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["ismartgate"],
"requirements": ["ismartgate==5.0.2"]

@@ -101,6 +101,15 @@ def _is_location_already_configured(
|
||||
return False
|
||||
|
||||
|
||||
def _is_location_name_already_configured(hass: HomeAssistant, new_data: str) -> bool:
|
||||
"""Check if the location name is already configured."""
|
||||
for entry in hass.config_entries.async_entries(DOMAIN):
|
||||
for subentry in entry.subentries.values():
|
||||
if subentry.title.lower() == new_data.lower():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class GoogleAirQualityConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Google AirQuality."""
|
||||
|
||||
@@ -178,8 +187,19 @@ class LocationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
description_placeholders: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
|
||||
return self.async_abort(reason="already_configured")
|
||||
errors["base"] = "location_already_configured"
|
||||
if _is_location_name_already_configured(self.hass, user_input[CONF_NAME]):
|
||||
errors["base"] = "location_name_already_configured"
|
||||
api: GoogleAirQualityApi = self._get_entry().runtime_data.api
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="location",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
_get_location_schema(self.hass), user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
if await _validate_input(user_input, api, errors, description_placeholders):
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_NAME],
|
||||
|
||||
@@ -47,12 +47,12 @@
|
||||
"config_subentries": {
|
||||
"location": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
|
||||
"unable_to_fetch": "[%key:component::google_air_quality::common::unable_to_fetch%]"
|
||||
},
|
||||
"entry_type": "Air quality location",
|
||||
"error": {
|
||||
"no_data_for_location": "Information is unavailable for this location. Please try a different location.",
|
||||
"location_already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
|
||||
"location_name_already_configured": "Location name already configured.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
|
||||
@@ -7,6 +7,7 @@ ATTR_CC = "cc"
ATTR_ENABLED = "enabled"
ATTR_END = "end"
ATTR_FROM = "from"
ATTR_ALIAS_FROM = "alias_from"
ATTR_ME = "me"
ATTR_MESSAGE = "message"
ATTR_PLAIN_TEXT = "plain_text"

@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from email.mime.text import MIMEText
|
||||
from email.utils import formataddr
|
||||
from typing import Any
|
||||
|
||||
from googleapiclient.http import HttpRequest
|
||||
@@ -17,10 +18,20 @@ from homeassistant.components.notify import (
|
||||
BaseNotificationService,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .api import AsyncConfigEntryAuth
|
||||
from .const import ATTR_BCC, ATTR_CC, ATTR_FROM, ATTR_ME, ATTR_SEND, DATA_AUTH
|
||||
from .const import (
|
||||
ATTR_ALIAS_FROM,
|
||||
ATTR_BCC,
|
||||
ATTR_CC,
|
||||
ATTR_FROM,
|
||||
ATTR_ME,
|
||||
ATTR_SEND,
|
||||
DATA_AUTH,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
|
||||
async def async_get_service(
|
||||
@@ -47,7 +58,17 @@ class GMailNotificationService(BaseNotificationService):
|
||||
email = MIMEText(message, "html")
|
||||
if to_addrs := kwargs.get(ATTR_TARGET):
|
||||
email["To"] = ", ".join(to_addrs)
|
||||
email["From"] = data.get(ATTR_FROM, ATTR_ME)
|
||||
|
||||
email_from = data.get(ATTR_FROM, ATTR_ME)
|
||||
if alias := data.get(ATTR_ALIAS_FROM):
|
||||
if email_from == ATTR_ME:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_from_for_alias",
|
||||
)
|
||||
email["From"] = formataddr((alias, email_from))
|
||||
else:
|
||||
email["From"] = email_from
|
||||
email["Subject"] = title
|
||||
email[ATTR_CC] = ", ".join(data.get(ATTR_CC, []))
|
||||
email[ATTR_BCC] = ", ".join(data.get(ATTR_BCC, []))
|
||||
@@ -57,9 +78,9 @@ class GMailNotificationService(BaseNotificationService):
|
||||
msg: HttpRequest
|
||||
users = (await self.auth.get_resource()).users()
|
||||
if data.get(ATTR_SEND) is False:
|
||||
msg = users.drafts().create(userId=email["From"], body={ATTR_MESSAGE: body})
|
||||
msg = users.drafts().create(userId=email_from, body={ATTR_MESSAGE: body})
|
||||
else:
|
||||
if not to_addrs:
|
||||
raise ValueError("recipient address required")
|
||||
msg = users.messages().send(userId=email["From"], body=body)
|
||||
msg = users.messages().send(userId=email_from, body=body)
|
||||
await self.hass.async_add_executor_job(msg.execute)
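
The alias handling above builds the From header with email.utils.formataddr, which combines a display name and an address. A small sketch of the resulting header value; the name and address are placeholders:

# Sketch: how formataddr renders the alias + address used for the From header.
from email.utils import formataddr

header = formataddr(("Jane Doe", "jane@example.com"))
print(header)  # Jane Doe <jane@example.com>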
|
||||
|
||||
@@ -47,6 +47,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"missing_from_for_alias": {
|
||||
"message": "Missing 'from' email when setting an alias to show. You have to provide a 'from' email"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_vacation": {
|
||||
"description": "Sets vacation responder settings for Google Mail.",
|
||||
|
||||
@@ -16,6 +16,7 @@ from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
|
||||
from .const import (
|
||||
ABORT_NO_PLANTS,
|
||||
@@ -23,12 +24,13 @@ from .const import (
|
||||
AUTH_PASSWORD,
|
||||
CONF_AUTH_TYPE,
|
||||
CONF_PLANT_ID,
|
||||
CONF_REGION,
|
||||
DEFAULT_URL,
|
||||
DOMAIN,
|
||||
ERROR_CANNOT_CONNECT,
|
||||
ERROR_INVALID_AUTH,
|
||||
LOGIN_INVALID_AUTH_CODE,
|
||||
SERVER_URLS,
|
||||
SERVER_URLS_NAMES,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -67,10 +69,13 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self.auth_type = AUTH_PASSWORD
|
||||
|
||||
# Traditional username/password authentication
|
||||
# Convert region name to URL - guaranteed to exist since vol.In validates it
|
||||
server_url = SERVER_URLS_NAMES[user_input[CONF_REGION]]
|
||||
|
||||
self.api = growattServer.GrowattApi(
|
||||
add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
|
||||
)
|
||||
self.api.server_url = user_input[CONF_URL]
|
||||
self.api.server_url = server_url
|
||||
|
||||
try:
|
||||
login_response = await self.hass.async_add_executor_job(
|
||||
@@ -91,6 +96,8 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
self.user_id = login_response["user"]["id"]
|
||||
self.data = user_input
|
||||
# Store the actual URL, not the region name
|
||||
self.data[CONF_URL] = server_url
|
||||
self.data[CONF_AUTH_TYPE] = self.auth_type
|
||||
return await self.async_step_plant()
|
||||
|
||||
@@ -104,8 +111,11 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self.auth_type = AUTH_API_TOKEN
|
||||
|
||||
# Using token authentication
|
||||
token = user_input[CONF_TOKEN]
|
||||
self.api = growattServer.OpenApiV1(token=token)
|
||||
# Convert region name to URL - guaranteed to exist since vol.In validates it
|
||||
server_url = SERVER_URLS_NAMES[user_input[CONF_REGION]]
|
||||
|
||||
self.api = growattServer.OpenApiV1(token=user_input[CONF_TOKEN])
|
||||
self.api.server_url = server_url
|
||||
|
||||
# Verify token by fetching plant list
|
||||
try:
|
||||
@@ -127,6 +137,8 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
|
||||
self.data = user_input
|
||||
# Store the actual URL, not the region name
|
||||
self.data[CONF_URL] = server_url
|
||||
self.data[CONF_AUTH_TYPE] = self.auth_type
|
||||
return await self.async_step_plant()
|
||||
|
||||
@@ -139,7 +151,12 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Required(CONF_URL, default=DEFAULT_URL): vol.In(SERVER_URLS),
|
||||
vol.Required(CONF_REGION, default=DEFAULT_URL): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=list(SERVER_URLS_NAMES.keys()),
|
||||
translation_key="region",
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -155,6 +172,12 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TOKEN): str,
|
||||
vol.Required(CONF_REGION, default=DEFAULT_URL): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=list(SERVER_URLS_NAMES.keys()),
|
||||
translation_key="region",
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from homeassistant.const import Platform
|
||||
|
||||
CONF_PLANT_ID = "plant_id"
|
||||
CONF_REGION = "region"
|
||||
|
||||
|
||||
# API key support
|
||||
@@ -18,13 +19,14 @@ DEFAULT_PLANT_ID = "0"
|
||||
|
||||
DEFAULT_NAME = "Growatt"
|
||||
|
||||
SERVER_URLS = [
|
||||
"https://openapi.growatt.com/", # Other regional server
|
||||
"https://openapi-cn.growatt.com/", # Chinese server
|
||||
"https://openapi-us.growatt.com/", # North American server
|
||||
"https://openapi-au.growatt.com/", # Australia Server
|
||||
"http://server.smten.com/", # smten server
|
||||
]
|
||||
SERVER_URLS_NAMES = {
|
||||
"north_america": "https://openapi-us.growatt.com/",
|
||||
"australia_new_zealand": "https://openapi-au.growatt.com/",
|
||||
"china": "https://openapi-cn.growatt.com/",
|
||||
"other_regions": "https://openapi.growatt.com/",
|
||||
"smten_server": "http://server.smten.com/",
|
||||
"era_server": "http://ess-server.atesspower.com/",
|
||||
}
|
||||
|
||||
DEPRECATED_URLS = [
|
||||
"https://server.growatt.com/",
|
||||
@@ -32,7 +34,7 @@ DEPRECATED_URLS = [
|
||||
"https://server-us.growatt.com/",
|
||||
]
|
||||
|
||||
DEFAULT_URL = SERVER_URLS[0]
|
||||
DEFAULT_URL = "other_regions"
|
||||
|
||||
DOMAIN = "growatt_server"
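
The config flow now presents the SERVER_URLS_NAMES keys as a region dropdown and stores the resolved URL under CONF_URL. A minimal sketch of that lookup, using an abridged copy of the constants defined above:

# Sketch: resolve a selected region key to the Growatt API base URL.
SERVER_URLS_NAMES = {
    "north_america": "https://openapi-us.growatt.com/",
    "other_regions": "https://openapi.growatt.com/",
}
DEFAULT_URL = "other_regions"


def region_to_url(region: str) -> str:
    """Return the server URL for a region key selected in the config flow."""
    return SERVER_URLS_NAMES.get(region, SERVER_URLS_NAMES[DEFAULT_URL])


assert region_to_url("north_america") == "https://openapi-us.growatt.com/"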
|
||||
|
||||
|
||||
@@ -24,9 +24,7 @@ rules:
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: todo
|
||||
comment: Update server URL dropdown to show regional descriptions (e.g., 'China', 'United States') instead of raw URLs.
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"password_auth": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"url": "Server region",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"title": "Enter your Growatt login credentials"
|
||||
@@ -26,7 +26,8 @@
|
||||
},
|
||||
"token_auth": {
|
||||
"data": {
|
||||
"token": "API Token"
|
||||
"token": "API Token",
|
||||
"url": "Server region"
|
||||
},
|
||||
"description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
|
||||
"title": "Enter your API token"
|
||||
@@ -530,6 +531,16 @@
|
||||
"grid_first": "Grid first",
|
||||
"load_first": "Load first"
|
||||
}
|
||||
},
|
||||
"region": {
|
||||
"options": {
|
||||
"australia_new_zealand": "Australia and New Zealand",
|
||||
"china": "China",
|
||||
"era_server": "Era server (Atess Power)",
|
||||
"north_america": "North America",
|
||||
"other_regions": "Other regions",
|
||||
"smten_server": "SMTEN server"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -51,12 +51,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
|
||||
|
||||
try:
|
||||
camera = await hass.async_add_executor_job(
|
||||
HikCamera, url, port, username, password
|
||||
HikCamera, url, port, username, password, ssl
|
||||
)
|
||||
except requests.exceptions.RequestException as err:
|
||||
raise ConfigEntryNotReady(f"Unable to connect to {host}") from err
|
||||
|
||||
device_id = camera.get_id()
|
||||
device_id = camera.get_id
|
||||
if device_id is None:
|
||||
raise ConfigEntryNotReady(f"Unable to get device ID from {host}")
|
||||
|
||||
@@ -70,6 +70,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
|
||||
device_type=device_type,
|
||||
)
|
||||
|
||||
# For NVRs or devices with no detected events, try to fetch events from ISAPI
|
||||
if device_type == "NVR" or not camera.current_event_states:
|
||||
|
||||
def fetch_and_inject_nvr_events() -> None:
|
||||
"""Fetch and inject NVR events in a single executor job."""
|
||||
if nvr_events := camera.get_event_triggers(None):
|
||||
camera.inject_events(nvr_events)
|
||||
|
||||
await hass.async_add_executor_job(fetch_and_inject_nvr_events)
|
||||
|
||||
# Start the event stream
|
||||
await hass.async_add_executor_job(camera.start_stream)
|
||||
|
||||
|
||||
@@ -49,14 +49,14 @@ class HikvisionConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
camera = await self.hass.async_add_executor_job(
|
||||
HikCamera, url, port, username, password
|
||||
HikCamera, url, port, username, password, ssl
|
||||
)
|
||||
device_id = camera.get_id()
|
||||
device_name = camera.get_name
|
||||
except requests.exceptions.RequestException:
|
||||
_LOGGER.exception("Error connecting to Hikvision device")
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
device_id = camera.get_id
|
||||
device_name = camera.get_name
|
||||
if device_id is None:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
@@ -102,16 +102,16 @@ class HikvisionConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
camera = await self.hass.async_add_executor_job(
|
||||
HikCamera, url, port, username, password
|
||||
HikCamera, url, port, username, password, ssl
|
||||
)
|
||||
device_id = camera.get_id()
|
||||
device_name = camera.get_name
|
||||
except requests.exceptions.RequestException:
|
||||
_LOGGER.exception(
|
||||
"Error connecting to Hikvision device during import, aborting"
|
||||
)
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
device_id = camera.get_id
|
||||
device_name = camera.get_name
|
||||
if device_id is None:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
{
"domain": "hikvision",
"name": "Hikvision",
"codeowners": ["@mezz64"],
"codeowners": ["@mezz64", "@ptarjan"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/hikvision",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyhik"],
"quality_scale": "legacy",
"requirements": ["pyHik==0.3.2"]
"requirements": ["pyHik==0.3.4"]
}

@@ -4,6 +4,7 @@
"codeowners": ["@bannhead"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/hisense_aehw4a1",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyaehw4a1"],
"requirements": ["pyaehw4a1==0.3.9"]

@@ -65,6 +65,11 @@ BINARY_SENSORS = (
},
translation_key="charging_connection",
),
HomeConnectBinarySensorEntityDescription(
key=StatusKey.BSH_COMMON_INTERIOR_ILLUMINATION_ACTIVE,
translation_key="interior_illumination_active",
device_class=BinarySensorDeviceClass.LIGHT,
),
HomeConnectBinarySensorEntityDescription(
key=StatusKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_DUST_BOX_INSERTED,
translation_key="dust_box_inserted",

Some files were not shown because too many files have changed in this diff.