Mirror of https://github.com/home-assistant/core.git, synced 2025-10-05 17:59:27 +00:00

Compare commits: 2025.10.0b...mqtt-suben (227 commits)
Commits (newest first, short SHAs):

98e0513866, 972e643d88, b0a08782e0, 6c9955f220, f56b94c0f9, 3cf035820b, 99a796d066, 1cd1b1aba8, 4131c14629, c2acda5796,
4806e7e9d9, 76606fd44f, 2983f1a3b6, 8019779b3a, 62cdcbf422, b12a5a36e1, e32763e464, b85cf3f9d2, 3777bcc2af, 52cde48ff0,
bf1da35303, c1bf11da34, 3c20325b37, fd8ccb8d8f, d76e947021, c91ed96543, b164531ba8, 7c623a8704, 7ae3340336, 653b73c601,
7c93d91bae, 07da0cfb2b, b411a11c2c, 0555b84d05, 790bddef63, a3089b8aa7, 77c8426d63, faf226f6c2, 06d143b81a, 08b6a0a702,
a20d1e3656, 36cc3682ca, 1b495ecafa, 7d1a0be07e, 327f65c991, 4ac89f6849, db3b070ed0, 6d940f476a, 1ca701dda4, 291c44100c,
c8d676e06b, 4c1ae0eddc, 39eadc814f, f7ecad61ba, fa4cb54549, 2be33c5e0a, 904d7e5d5a, dbc4a65d48, b93f4aabf1, 9eaa40c7a4,
b308a882fb, 7f63ba2087, d7269cfcc6, 2850a574f6, dcb8d4f702, aeadc0c4b0, 683c6b17be, 69dd5c91b7, 5cf7dfca8f, 62a49d4244,
93ee6322f2, 914990b58a, f78bb5adb6, 905f5e7289, ec503618c3, 7a41cbc314, c58ba734e7, 68f63be62f, 2aa4ca1351, fbabb27787,
0960d78eb5, 474b40511f, 18b80aced3, b964d362b7, 3914e41f3c, 82bdfcb99b, 976cea600f, 8c8713c3f7, 2359ae6ce7, b570fd35c8,
9d94e6b3b4, cfab789823, 81917425dc, bfb62709d4, ca3f2ee782, fc8703a40f, 80517c7ac1, 2b4b46eaf8, 40b9dae608, 5975cd6e09,
258c9ff52b, 89c5d498a4, 76cb4d123a, f0c29c7699, aa4151ced7, 0a6fa978fa, dc02002b9d, f071a3f38b, b935231e47, b9f7613567,
1289a031ab, 289546ef6d, aacff4db5d, f833b56122, 7eb0f2993f, abb341abfe, 0d90614369, ec84bebeea, 9176867d6b, 281a137ff5,
d6543480ac, ae6391b866, 10b56e4258, 0ff2597957, 026b28e962, 9a1e67294a, cdb448a5cc, ab80e726e2, 2d5d0f67b2, d4100b6096,
955e854d77, 0c37f88c49, 48167eeb9c, 24177197f7, 863fc0ba97, 9f7b229d02, ffd909f3d9, 1ebf096a33, 96d51965e5, 04b510b020,
c9a301d50e, b304bd1a8b, b99525b231, 634db13990, ad51a77989, 3348a39e8a, 81c2e356ec, de6c3512d2, 36dc1e938a, 07a78cf6f7,
eaa673e0c3, f2c4ca081f, e3d707f0b4, fb93fed2e5, 95dfc2f23d, 408df2093a, f32bf0cc3e, dbbe3145b6, f8bf3ea2ef, 053bd31d43,
1aefc3f37a, 3de955d9ce, 0ff88fd366, eb84020773, 4bbfea3c7c, 63d4fb7558, 953895cd81, a6c3f4efc0, 11e880d034, e4d6bdb398,
6ced1783e3, 8051f78d10, b724176b23, fdca16ea92, f8fd8b432a, 9148ae70ce, 447cb26d28, 2af36465f6, d5f7265424, cc16af7f2d,
7a4d75bc44, ec0380fd3b, b17cc71dfb, 89b327ed7b, 9bf361a1b8, d11c171c75, c523c45d17, c1b9c0e1b6, 487b9ff03e, ec62b0cdfb,
6d0470064f, 7450b3fd1a, 5b70910d77, 52de5ff5ff, c4389a1679, 35faaa6cae, 3c0b13975a, bc88696339, 8f99c3f64a, 88016d96d4,
47df73b18f, 1c12d2b8cd, eb38837a8c, 159c7fbfd1, 7ee31f0884, 0c5e12571a, 9db973217f, cf1a745283, 834e3f1963, 3f8f7573c9,
0ae272f1f6, 8774295e2e, 0c8d2594ef, 205bd2676b, 25849fd9cc, 7d6eac9ff7, 31017ebc98, 724a7b0ecc, 91e13d447a, 7c8ad9d535,
9cd3ab853d, 0b0f8c5829, ae7bc7fb1b, 09750872b5, 076e51017b, 95e7b00996, ddecf1ac21
.github/workflows/builder.yml (vendored, 10 changed lines)
@@ -190,7 +190,7 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
.github/workflows/ci.yaml (vendored, 64 changed lines)
@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.10"
+  HA_SHORT_VERSION: "2025.11"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: >-
@@ -279,7 +279,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -309,7 +309,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
           needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: >-
@@ -513,7 +513,7 @@ jobs:
           needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -525,7 +525,7 @@ jobs:
           env.HA_SHORT_VERSION }}-
       - name: Check if apt cache exists
         id: cache-apt-check
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
           path: |
@@ -570,7 +570,7 @@ jobs:
           fi
       - name: Save apt cache
         if: steps.cache-apt-check.outputs.cache-hit != 'true'
-        uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
       - base
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Dependency review
-        uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
+        uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
         with:
           license-check: false # We use our own license audit checks

@@ -741,7 +741,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
           needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: .mypy_cache
           key: >-
@@ -935,7 +935,7 @@ jobs:
     name: Split tests for full run
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
       Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
       Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     steps:
       - name: Restore apt cache
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@v4.3.0
         with:
           path: |
             ${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           fail-on-cache-miss: true
.github/workflows/codeql.yml (vendored, 4 changed lines)
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
         with:
           category: "/language:python"
.github/workflows/wheels.yml (vendored, 4 changed lines)
@@ -160,7 +160,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.07.0
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -221,7 +221,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.07.0
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -203,6 +203,7 @@ homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.filter.*
+homeassistant.components.firefly_iii.*
 homeassistant.components.fitbit.*
 homeassistant.components.flexit_bacnet.*
 homeassistant.components.flux_led.*
CODEOWNERS (generated, 4 changed lines)
@@ -492,6 +492,8 @@ build.json @home-assistant/supervisor
 /tests/components/filesize/ @gjohansson-ST
 /homeassistant/components/filter/ @dgomes
 /tests/components/filter/ @dgomes
+/homeassistant/components/firefly_iii/ @erwindouna
+/tests/components/firefly_iii/ @erwindouna
 /homeassistant/components/fireservicerota/ @cyberjunky
 /tests/components/fireservicerota/ @cyberjunky
 /homeassistant/components/firmata/ @DaAwesomeP
@@ -953,6 +955,8 @@ build.json @home-assistant/supervisor
 /tests/components/met_eireann/ @DylanGore
 /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
 /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
+/homeassistant/components/meteo_lt/ @xE1H
+/tests/components/meteo_lt/ @xE1H
 /homeassistant/components/meteoalarm/ @rolfberkenbosch
 /homeassistant/components/meteoclimatic/ @adrianmo
 /tests/components/meteoclimatic/ @adrianmo
build.yaml (10 changed lines)
@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -616,34 +616,44 @@ async def async_enable_logging(
         ),
     )

     # Log errors to a file if we have write access to file or config dir
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO if verbose else logging.WARNING)
+
     if log_file is None:
-        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
+        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
+        if "SUPERVISOR" in os.environ:
+            _LOGGER.info("Running in Supervisor, not logging to file")
+            # Rename the default log file if it exists, since previous versions created
+            # it even on Supervisor
+            if os.path.isfile(default_log_path):
+                with contextlib.suppress(OSError):
+                    os.rename(default_log_path, f"{default_log_path}.old")
+            err_log_path = None
+        else:
+            err_log_path = default_log_path
     else:
         err_log_path = os.path.abspath(log_file)

-    err_path_exists = os.path.isfile(err_log_path)
-    err_dir = os.path.dirname(err_log_path)
+    if err_log_path:
+        err_path_exists = os.path.isfile(err_log_path)
+        err_dir = os.path.dirname(err_log_path)

-    # Check if we can write to the error log if it exists or that
-    # we can create files in the containing directory if not.
-    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
-        not err_path_exists and os.access(err_dir, os.W_OK)
-    ):
-        err_handler = await hass.async_add_executor_job(
-            _create_log_file, err_log_path, log_rotate_days
-        )
+        # Check if we can write to the error log if it exists or that
+        # we can create files in the containing directory if not.
+        if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
+            not err_path_exists and os.access(err_dir, os.W_OK)
+        ):
+            err_handler = await hass.async_add_executor_job(
+                _create_log_file, err_log_path, log_rotate_days
+            )

-        err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+            err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+            logger.addHandler(err_handler)

-        logger = logging.getLogger()
-        logger.addHandler(err_handler)
-        logger.setLevel(logging.INFO if verbose else logging.WARNING)
-
-        # Save the log file location for access by other components.
-        hass.data[DATA_LOGGING] = err_log_path
-    else:
-        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
+            # Save the log file location for access by other components.
+            hass.data[DATA_LOGGING] = err_log_path
+        else:
+            _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

     async_activate_log_queue_handler(hass)
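The core of the hunk above is a permission probe: attach a file handler only when the log file itself is writable, or, if it does not exist yet, when the containing directory allows creating it. Below is a minimal stdlib sketch of that check; the helper name `attach_error_log` is hypothetical and stands in for Home Assistant's internal setup, which also handles rotation and executor offloading.

```python
import logging
import os


def attach_error_log(logger: logging.Logger, err_log_path: str | None) -> bool:
    """Attach a file handler only if the target path is writable.

    If the file exists we need write access to the file; if it does not,
    we need write access to the directory that would contain it.
    """
    if err_log_path is None:  # e.g. file logging disabled under Supervisor
        return False

    path_exists = os.path.isfile(err_log_path)
    containing_dir = os.path.dirname(err_log_path)

    if (path_exists and os.access(err_log_path, os.W_OK)) or (
        not path_exists and os.access(containing_dir, os.W_OK)
    ):
        logger.addHandler(logging.FileHandler(err_log_path))
        return True

    logger.error("Unable to set up error log %s (access denied)", err_log_path)
    return False
```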
homeassistant/brands/eltako.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "eltako",
+  "name": "Eltako",
+  "iot_standards": ["matter"]
+}
homeassistant/brands/konnected.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "konnected",
+  "name": "Konnected",
+  "integrations": ["konnected", "konnected_esphome"]
+}
homeassistant/brands/level.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "level",
+  "name": "Level",
+  "iot_standards": ["matter"]
+}
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.1"]
+  "requirements": ["accuweather==4.2.2"]
 }
@@ -4,10 +4,18 @@ from __future__ import annotations

 from airos.airos8 import AirOS8

-from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
+from homeassistant.const import (
+    CONF_HOST,
+    CONF_PASSWORD,
+    CONF_SSL,
+    CONF_USERNAME,
+    CONF_VERIFY_SSL,
+    Platform,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

+from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

 _PLATFORMS: list[Platform] = [
@@ -21,13 +29,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo

     # By default airOS 8 comes with self-signed SSL certificates,
     # with no option in the web UI to change or upload a custom certificate.
-    session = async_get_clientsession(hass, verify_ssl=False)
+    session = async_get_clientsession(
+        hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
+    )

     airos_device = AirOS8(
         host=entry.data[CONF_HOST],
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
         session=session,
+        use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
     )

     coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
@@ -40,6 +51,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
     return True


+async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
+    """Migrate old config entry."""
+
+    if entry.version > 1:
+        # This means the user has downgraded from a future version
+        return False
+
+    if entry.version == 1 and entry.minor_version == 1:
+        new_data = {**entry.data}
+        advanced_data = {
+            CONF_SSL: DEFAULT_SSL,
+            CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
+        }
+        new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
+
+        hass.config_entries.async_update_entry(
+            entry,
+            data=new_data,
+            minor_version=2,
+        )
+
+    return True
+
+
 async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
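The new `async_migrate_entry` above follows the usual version-gated migration idiom: refuse data written by a newer schema, and upgrade older data in place while bumping the minor version. A minimal plain-Python sketch of that control flow, with a hypothetical `FakeEntry` standing in for a config entry:

```python
from dataclasses import dataclass, field
from typing import Any


@dataclass
class FakeEntry:
    """Illustrative stand-in for a config entry."""

    version: int = 1
    minor_version: int = 1
    data: dict[str, Any] = field(default_factory=dict)


def migrate(entry: FakeEntry) -> bool:
    """Version-gated migration mirroring the diff above."""
    if entry.version > 1:
        # Data was written by a newer schema; refuse rather than guess.
        return False
    if entry.version == 1 and entry.minor_version == 1:
        # Nest the new options under their own section with safe defaults.
        entry.data = {
            **entry.data,
            "advanced_settings": {"ssl": True, "verify_ssl": False},
        }
        entry.minor_version = 2
    return True


entry = FakeEntry(data={"host": "192.0.2.1"})
assert migrate(entry) and entry.minor_version == 2
```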
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from collections.abc import Mapping
 import logging
 from typing import Any

@@ -14,11 +15,23 @@ from airos.exceptions import (
 )
 import voluptuous as vol

-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
+from homeassistant.const import (
+    CONF_HOST,
+    CONF_PASSWORD,
+    CONF_SSL,
+    CONF_USERNAME,
+    CONF_VERIFY_SSL,
+)
+from homeassistant.data_entry_flow import section
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.selector import (
+    TextSelector,
+    TextSelectorConfig,
+    TextSelectorType,
+)

-from .const import DOMAIN
+from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOS8

 _LOGGER = logging.getLogger(__name__)
@@ -28,6 +41,15 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
         vol.Required(CONF_HOST): str,
         vol.Required(CONF_USERNAME, default="ubnt"): str,
         vol.Required(CONF_PASSWORD): str,
+        vol.Required(SECTION_ADVANCED_SETTINGS): section(
+            vol.Schema(
+                {
+                    vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
+                    vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
+                }
+            ),
+            {"collapsed": True},
+        ),
     }
 )

@@ -36,47 +58,109 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Ubiquiti airOS."""

     VERSION = 1
+    MINOR_VERSION = 2
+
+    def __init__(self) -> None:
+        """Initialize the config flow."""
+        super().__init__()
+        self.airos_device: AirOS8
+        self.errors: dict[str, str] = {}

     async def async_step_user(
-        self,
-        user_input: dict[str, Any] | None = None,
+        self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
-        """Handle the initial step."""
-        errors: dict[str, str] = {}
+        """Handle the manual input of host and credentials."""
+        self.errors = {}
         if user_input is not None:
-            # By default airOS 8 comes with self-signed SSL certificates,
-            # with no option in the web UI to change or upload a custom certificate.
-            session = async_get_clientsession(self.hass, verify_ssl=False)
-
-            airos_device = AirOS8(
-                host=user_input[CONF_HOST],
-                username=user_input[CONF_USERNAME],
-                password=user_input[CONF_PASSWORD],
-                session=session,
-            )
-            try:
-                await airos_device.login()
-                airos_data = await airos_device.status()
-
-            except (
-                AirOSConnectionSetupError,
-                AirOSDeviceConnectionError,
-            ):
-                errors["base"] = "cannot_connect"
-            except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
-                errors["base"] = "invalid_auth"
-            except AirOSKeyDataMissingError:
-                errors["base"] = "key_data_missing"
-            except Exception:
-                _LOGGER.exception("Unexpected exception")
-                errors["base"] = "unknown"
-            else:
-                await self.async_set_unique_id(airos_data.derived.mac)
-                self._abort_if_unique_id_configured()
+            validated_info = await self._validate_and_get_device_info(user_input)
+            if validated_info:
                 return self.async_create_entry(
-                    title=airos_data.host.hostname, data=user_input
+                    title=validated_info["title"],
+                    data=validated_info["data"],
                 )
+        return self.async_show_form(
+            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
+        )
+
+    async def _validate_and_get_device_info(
+        self, config_data: dict[str, Any]
+    ) -> dict[str, Any] | None:
+        """Validate user input with the device API."""
+        # By default airOS 8 comes with self-signed SSL certificates,
+        # with no option in the web UI to change or upload a custom certificate.
+        session = async_get_clientsession(
+            self.hass,
+            verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
+        )
+
+        airos_device = AirOS8(
+            host=config_data[CONF_HOST],
+            username=config_data[CONF_USERNAME],
+            password=config_data[CONF_PASSWORD],
+            session=session,
+            use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
+        )
+        try:
+            await airos_device.login()
+            airos_data = await airos_device.status()
+
+        except (
+            AirOSConnectionSetupError,
+            AirOSDeviceConnectionError,
+        ):
+            self.errors["base"] = "cannot_connect"
+        except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
+            self.errors["base"] = "invalid_auth"
+        except AirOSKeyDataMissingError:
+            self.errors["base"] = "key_data_missing"
+        except Exception:
+            _LOGGER.exception("Unexpected exception during credential validation")
+            self.errors["base"] = "unknown"
+        else:
+            await self.async_set_unique_id(airos_data.derived.mac)
+
+            if self.source == SOURCE_REAUTH:
+                self._abort_if_unique_id_mismatch()
+            else:
+                self._abort_if_unique_id_configured()
+
+            return {"title": airos_data.host.hostname, "data": config_data}
+
+        return None
+
+    async def async_step_reauth(
+        self,
+        user_input: Mapping[str, Any],
+    ) -> ConfigFlowResult:
+        """Perform reauthentication upon an API authentication error."""
+        return await self.async_step_reauth_confirm(user_input)
+
+    async def async_step_reauth_confirm(
+        self,
+        user_input: Mapping[str, Any],
+    ) -> ConfigFlowResult:
+        """Perform reauthentication upon an API authentication error."""
+        self.errors = {}
+
+        if user_input:
+            validate_data = {**self._get_reauth_entry().data, **user_input}
+            if await self._validate_and_get_device_info(config_data=validate_data):
+                return self.async_update_reload_and_abort(
+                    self._get_reauth_entry(),
+                    data_updates=validate_data,
+                )
+
         return self.async_show_form(
-            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
+            step_id="reauth_confirm",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_PASSWORD): TextSelector(
+                        TextSelectorConfig(
+                            type=TextSelectorType.PASSWORD,
+                            autocomplete="current-password",
+                        )
+                    ),
+                }
+            ),
+            errors=self.errors,
+        )
@@ -7,3 +7,8 @@ DOMAIN = "airos"
 SCAN_INTERVAL = timedelta(minutes=1)

 MANUFACTURER = "Ubiquiti"
+
+DEFAULT_VERIFY_SSL = False
+DEFAULT_SSL = True
+
+SECTION_ADVANCED_SETTINGS = "advanced_settings"
@@ -14,7 +14,7 @@ from airos.exceptions import (

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
         try:
             await self.airos_device.login()
             return await self.airos_device.status()
-        except (AirOSConnectionAuthenticationError,) as err:
+        except AirOSConnectionAuthenticationError as err:
             _LOGGER.exception("Error authenticating with airOS device")
-            raise ConfigEntryError(
+            raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN, translation_key="invalid_auth"
             ) from err
         except (
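The coordinator change above is about routing failures: credential problems now raise an auth-specific error (which prompts the user to re-authenticate) instead of a generic setup error, while connection problems keep retrying on the next poll. A small sketch of that split, with `PermissionError` and `OSError` standing in for the library's auth and connection exceptions:

```python
class AuthFailed(Exception):
    """Credential problem: surface a re-authentication flow, do not retry."""


class UpdateFailed(Exception):
    """Transient problem: keep polling and retry on the next interval."""


def poll_device(fetch) -> dict:
    """Classify errors from a login+status poll, as in the diff above."""
    try:
        return fetch()
    except PermissionError as err:
        # Bad credentials will not fix themselves; ask the user to reauth.
        raise AuthFailed("invalid_auth") from err
    except OSError as err:
        # Connectivity blips are expected; retry next poll.
        raise UpdateFailed(repr(err)) from err
```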
@@ -2,11 +2,11 @@

 from __future__ import annotations

-from homeassistant.const import CONF_HOST
+from homeassistant.const import CONF_HOST, CONF_SSL
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN, MANUFACTURER
+from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSDataUpdateCoordinator


@@ -20,9 +20,14 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
         super().__init__(coordinator)

         airos_data = self.coordinator.data
+        url_schema = (
+            "https"
+            if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
+            else "http"
+        )

         configuration_url: str | None = (
-            f"https://{coordinator.config_entry.data[CONF_HOST]}"
+            f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
         )

         self._attr_device_info = DeviceInfo(
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airos",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["airos==0.5.1"]
+  "requirements": ["airos==0.5.4"]
 }
@@ -2,6 +2,14 @@
   "config": {
     "flow_title": "Ubiquiti airOS device",
     "step": {
+      "reauth_confirm": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "password": "[%key:component::airos::config::step::user::data_description::password%]"
+        }
+      },
       "user": {
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
@@ -12,6 +20,18 @@
           "host": "IP address or hostname of the airOS device",
           "username": "Administrator username for the airOS device, normally 'ubnt'",
           "password": "Password configured through the UISP app or web interface"
-        }
+        },
+        "sections": {
+          "advanced_settings": {
+            "data": {
+              "ssl": "Use HTTPS",
+              "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
+            },
+            "data_description": {
+              "ssl": "Whether the connection should be encrypted (required for most devices)",
+              "verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
+            }
+          }
+        }
       }
     },
@@ -22,7 +42,9 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+      "unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
     }
   },
   "entity": {
@@ -114,6 +114,8 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
     ),
 }

+PARALLEL_UPDATES = 0
+

 @callback
 def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:
@@ -22,6 +22,17 @@ class OAuth2FlowHandler(
     VERSION = CONFIG_FLOW_VERSION
     MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION

+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Check we have the cloud integration set up."""
+        if "cloud" not in self.hass.config.components:
+            return self.async_abort(
+                reason="cloud_not_enabled",
+                description_placeholders={"default_config": "default_config"},
+            )
+        return await super().async_step_user(user_input)
+
     async def async_step_reauth(
         self, user_input: Mapping[str, Any]
     ) -> ConfigFlowResult:
@@ -24,7 +24,8 @@
       "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
       "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
+      "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
+      "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
     },
     "create_entry": {
       "default": "[%key:common::config_flow::create_entry::authenticated%]"
@@ -10,6 +10,7 @@ from aioamazondevices.api import AmazonDevice
 from aioamazondevices.const import SENSOR_STATE_OFF

 from homeassistant.components.binary_sensor import (
+    DOMAIN as BINARY_SENSOR_DOMAIN,
     BinarySensorDeviceClass,
     BinarySensorEntity,
     BinarySensorEntityDescription,
@@ -20,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
+from .utils import async_update_unique_id

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -31,6 +33,7 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):

     is_on_fn: Callable[[AmazonDevice, str], bool]
     is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True


 BINARY_SENSORS: Final = (
@@ -41,46 +44,15 @@ BINARY_SENSORS: Final = (
         is_on_fn=lambda device, _: device.online,
     ),
     AmazonBinarySensorEntityDescription(
-        key="bluetooth",
-        entity_category=EntityCategory.DIAGNOSTIC,
-        translation_key="bluetooth",
-        is_on_fn=lambda device, _: device.bluetooth_state,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="babyCryDetectionState",
-        translation_key="baby_cry_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="beepingApplianceDetectionState",
-        translation_key="beeping_appliance_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="coughDetectionState",
-        translation_key="cough_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="dogBarkDetectionState",
-        translation_key="dog_bark_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="humanPresenceDetectionState",
+        key="detectionState",
         device_class=BinarySensorDeviceClass.MOTION,
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="waterSoundsDetectionState",
-        translation_key="water_sounds_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+        is_on_fn=lambda device, key: bool(
+            device.sensors[key].value != SENSOR_STATE_OFF
+        ),
         is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_available_fn=lambda device, key: (
+            device.online and device.sensors[key].error is False
+        ),
     ),
 )
@@ -94,6 +66,15 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

+    # Replace unique id for "detectionState" binary sensor
+    await async_update_unique_id(
+        hass,
+        coordinator,
+        BINARY_SENSOR_DOMAIN,
+        "humanPresenceDetectionState",
+        "detectionState",
+    )
+
     known_devices: set[str] = set()

     def _check_device() -> None:
@@ -125,3 +106,13 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
         return self.entity_description.is_on_fn(
             self.device, self.entity_description.key
         )
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
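The pattern introduced above keeps per-sensor behavior in the entity description: each description carries small callbacks (`is_on_fn`, `is_supported`, and now `is_available_fn` with an "always available" default), and the entity's `available` property combines the callback with the base-class check. A reduced sketch of the callback-on-description idea, using a plain dict as a hypothetical stand-in for `AmazonDevice`:

```python
from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True)
class SensorDescription:
    """Sketch of a description that carries per-key callbacks."""

    key: str
    is_on_fn: Callable[[dict, str], bool]
    # Default keeps existing descriptions working; only descriptions that
    # need a stricter rule override it.
    is_available_fn: Callable[[dict, str], bool] = lambda device, key: True


DETECTION = SensorDescription(
    key="detectionState",
    is_on_fn=lambda device, key: device["sensors"][key] != "OFF",
    is_available_fn=lambda device, key: device["online"]
    and key in device["sensors"],
)

device = {"online": True, "sensors": {"detectionState": "ON"}}
assert DETECTION.is_available_fn(device, DETECTION.key)
assert DETECTION.is_on_fn(device, DETECTION.key)
```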
@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 data = await validate_input(self.hass, user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except (CannotAuthenticate, TypeError):
+            except CannotAuthenticate:
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except (CannotAuthenticate, TypeError):
+            except CannotAuthenticate:
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
                 translation_key="cannot_retrieve_data_with_error",
                 translation_placeholders={"error": repr(err)},
             ) from err
-        except (CannotAuthenticate, TypeError) as err:
+        except CannotAuthenticate as err:
             raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
@@ -60,7 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
         "online": device.online,
         "serial number": device.serial_number,
         "software version": device.software_version,
-        "do not disturb": device.do_not_disturb,
-        "response style": device.response_style,
-        "bluetooth state": device.bluetooth_state,
+        "sensors": device.sensors,
     }
@@ -1,44 +1,4 @@
 {
-  "entity": {
-    "binary_sensor": {
-      "bluetooth": {
-        "default": "mdi:bluetooth-off",
-        "state": {
-          "on": "mdi:bluetooth"
-        }
-      },
-      "baby_cry_detection": {
-        "default": "mdi:account-voice-off",
-        "state": {
-          "on": "mdi:account-voice"
-        }
-      },
-      "beeping_appliance_detection": {
-        "default": "mdi:bell-off",
-        "state": {
-          "on": "mdi:bell-ring"
-        }
-      },
-      "cough_detection": {
-        "default": "mdi:blur-off",
-        "state": {
-          "on": "mdi:blur"
-        }
-      },
-      "dog_bark_detection": {
-        "default": "mdi:dog-side-off",
-        "state": {
-          "on": "mdi:dog-side"
-        }
-      },
-      "water_sounds_detection": {
-        "default": "mdi:water-pump-off",
-        "state": {
-          "on": "mdi:water-pump"
-        }
-      }
-    }
-  },
   "services": {
     "send_sound": {
       "service": "mdi:cast-audio"
@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
-  "quality_scale": "silver",
-  "requirements": ["aioamazondevices==6.0.0"]
+  "quality_scale": "platinum",
+  "requirements": ["aioamazondevices==6.2.7"]
 }
@@ -31,6 +31,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
     """Amazon Devices sensor entity description."""

     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
+        device.online and device.sensors[key].error is False
+    )


 SENSORS: Final = (
@@ -99,3 +102,13 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
     def native_value(self) -> StateType:
         """Return the state of the sensor."""
         return self.device.sensors[self.entity_description.key].value
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
@@ -58,26 +58,6 @@
       }
     },
   "entity": {
-    "binary_sensor": {
-      "bluetooth": {
-        "name": "Bluetooth"
-      },
-      "baby_cry_detection": {
-        "name": "Baby crying"
-      },
-      "beeping_appliance_detection": {
-        "name": "Beeping appliance"
-      },
-      "cough_detection": {
-        "name": "Coughing"
-      },
-      "dog_bark_detection": {
-        "name": "Dog barking"
-      },
-      "water_sounds_detection": {
-        "name": "Water sounds"
-      }
-    },
     "notify": {
       "speak": {
         "name": "Speak"
@@ -8,13 +8,17 @@ from typing import TYPE_CHECKING, Any, Final

 from aioamazondevices.api import AmazonDevice

-from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
+from homeassistant.components.switch import (
+    DOMAIN as SWITCH_DOMAIN,
+    SwitchEntity,
+    SwitchEntityDescription,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import alexa_api_call
+from .utils import alexa_api_call, async_update_unique_id

 PARALLEL_UPDATES = 1

@@ -24,16 +28,17 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
     """Alexa Devices switch entity description."""

     is_on_fn: Callable[[AmazonDevice], bool]
-    subkey: str
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
+        device.online and device.sensors[key].error is False
+    )
     method: str


 SWITCHES: Final = (
     AmazonSwitchEntityDescription(
-        key="do_not_disturb",
-        subkey="AUDIO_PLAYER",
+        key="dnd",
         translation_key="do_not_disturb",
-        is_on_fn=lambda _device: _device.do_not_disturb,
+        is_on_fn=lambda device: bool(device.sensors["dnd"].value),
         method="set_do_not_disturb",
     ),
 )
@@ -48,6 +53,11 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

+    # Replace unique id for "DND" switch and remove from Speaker Group
+    await async_update_unique_id(
+        hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
+    )
+
     known_devices: set[str] = set()

     def _check_device() -> None:
@@ -59,7 +69,7 @@ async def async_setup_entry(
             AmazonSwitchEntity(coordinator, serial_num, switch_desc)
             for switch_desc in SWITCHES
             for serial_num in new_devices
-            if switch_desc.subkey in coordinator.data[serial_num].capabilities
+            if switch_desc.key in coordinator.data[serial_num].sensors
         )

     _check_device()
@@ -94,3 +104,13 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
     def is_on(self) -> bool:
         """Return True if switch is on."""
         return self.entity_description.is_on_fn(self.device)
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
@@ -6,9 +6,12 @@ from typing import Any, Concatenate

 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
+import homeassistant.helpers.entity_registry as er

-from .const import DOMAIN
+from .const import _LOGGER, DOMAIN
+from .coordinator import AmazonDevicesCoordinator
 from .entity import AmazonEntity


@@ -38,3 +41,23 @@ def alexa_api_call[_T: AmazonEntity, **_P](
             ) from err

     return cmd_wrapper
+
+
+async def async_update_unique_id(
+    hass: HomeAssistant,
+    coordinator: AmazonDevicesCoordinator,
+    domain: str,
+    old_key: str,
+    new_key: str,
+) -> None:
+    """Update unique id for entities created with old format."""
+    entity_registry = er.async_get(hass)
+
+    for serial_num in coordinator.data:
+        unique_id = f"{serial_num}-{old_key}"
+        if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
+            _LOGGER.debug("Updating unique_id for %s", entity_id)
+            new_unique_id = unique_id.replace(old_key, new_key)
+
+            # Update the registry with the new unique_id
+            entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
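The helper added above exists so that renaming a description key (for example `do_not_disturb` to `dnd`) does not orphan existing entities: the stored unique ID `{serial}-{old_key}` is rewritten to `{serial}-{new_key}` in the registry, so the entity keeps its entity ID and history. A toy sketch with a plain dict standing in for the entity registry (mapping unique_id to entity_id):

```python
def update_unique_ids(
    registry: dict[str, str], serials: list[str], old_key: str, new_key: str
) -> None:
    """Rename stored unique IDs in place, mirroring the diff above."""
    for serial in serials:
        unique_id = f"{serial}-{old_key}"
        if (entity_id := registry.pop(unique_id, None)) is not None:
            registry[f"{serial}-{new_key}"] = entity_id


registry = {"ABC123-do_not_disturb": "switch.echo_do_not_disturb"}
update_unique_ids(registry, ["ABC123"], "do_not_disturb", "dnd")
assert registry == {"ABC123-dnd": "switch.echo_do_not_disturb"}
```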
@@ -505,7 +505,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()


-async def async_devices_payload(hass: HomeAssistant) -> dict:
+async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
    """Return detailed information about entities and devices."""
    dev_reg = dr.async_get(hass)
    ent_reg = er.async_get(hass)
@@ -513,6 +513,8 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
    integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
    integration_configs: dict[str, AnalyticsModifications] = {}

+    removed_devices: set[str] = set()
+
    # Get device list
    for device_entry in dev_reg.devices.values():
        if not device_entry.primary_config_entry:
@@ -525,6 +527,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
        if config_entry is None:
            continue

+        if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
+            removed_devices.add(device_entry.id)
+            continue
+
        integration_domain = config_entry.domain

        integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -551,7 +557,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
        for domain, integration_info in integration_inputs.items()
        if (integration := integrations.get(domain)) is not None
        and integration.is_built_in
-        and integration.integration_type in ("device", "hub")
+        and integration.manifest.get("integration_type") in ("device", "hub")
    }

    # Call integrations that implement the analytics platform
@@ -614,11 +620,12 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
            device_config = integration_config.devices.get(device_id, device_config)

            if device_config.remove:
+                removed_devices.add(device_id)
                continue

            device_entry = dev_reg.devices[device_id]

-            device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
+            device_id_mapping[device_id] = (integration_domain, len(devices_info))

            devices_info.append(
                {
@@ -669,7 +676,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:

            entity_entry = ent_reg.entities[entity_id]

-            entity_state = hass.states.get(entity_entry.entity_id)
+            entity_state = hass.states.get(entity_id)

            entity_info = {
                # LIMITATION: `assumed_state` can be overridden by users;
@@ -690,15 +697,19 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
                "unit_of_measurement": entity_entry.unit_of_measurement,
            }

-            if (
-                ((device_id_ := entity_entry.device_id) is not None)
-                and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
-                and (new_device_id[0] == integration_domain)
-            ):
-                device_info = devices_info[new_device_id[1]]
-                device_info["entities"].append(entity_info)
-            else:
-                entities_info.append(entity_info)
+            if (device_id_ := entity_entry.device_id) is not None:
+                if device_id_ in removed_devices:
+                    # The device was removed, so we remove the entity too
+                    continue
+
+                if (
+                    new_device_id := device_id_mapping.get(device_id_)
+                ) is not None and (new_device_id[0] == integration_domain):
+                    device_info = devices_info[new_device_id[1]]
+                    device_info["entities"].append(entity_info)
+                    continue
+
+            entities_info.append(entity_info)

    return {
        "version": "home-assistant:1",
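The behavioral core of this hunk: service-type devices are collected in `removed_devices`, and entities that point at a removed device are now dropped instead of being reported standalone. A toy version of that filter in plain Python (illustrative data only):

    devices = {"dev1": {"type": "service"}, "dev2": {"type": "device"}}
    entities = [
        {"id": "e1", "device_id": "dev1"},
        {"id": "e2", "device_id": "dev2"},
        {"id": "e3", "device_id": None},
    ]

    removed = {d_id for d_id, dev in devices.items() if dev["type"] == "service"}

    kept = []
    for ent in entities:
        if ent["device_id"] in removed:
            continue  # the device was removed, so drop its entities too
        kept.append(ent)

    print([e["id"] for e in kept])  # ['e2', 'e3']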
@@ -1308,7 +1308,9 @@ class PipelineRun:
                # instead of a full response.
                all_targets_in_satellite_area = (
                    self._get_all_targets_in_satellite_area(
-                        conversation_result.response, self._device_id
+                        conversation_result.response,
+                        self._satellite_id,
+                        self._device_id,
                    )
                )

@@ -1337,39 +1339,62 @@ class PipelineRun:
        return (speech, all_targets_in_satellite_area)

    def _get_all_targets_in_satellite_area(
-        self, intent_response: intent.IntentResponse, device_id: str | None
+        self,
+        intent_response: intent.IntentResponse,
+        satellite_id: str | None,
+        device_id: str | None,
    ) -> bool:
        """Return true if all targeted entities were in the same area as the device."""
        if (
-            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
-            or (not intent_response.matched_states)
-            or (not device_id)
+            intent_response.response_type != intent.IntentResponseType.ACTION_DONE
+            or not intent_response.matched_states
        ):
            return False

-        device_registry = dr.async_get(self.hass)
-
-        if (not (device := device_registry.async_get(device_id))) or (
-            not device.area_id
-        ):
-            return False
-
        entity_registry = er.async_get(self.hass)
-        for state in intent_response.matched_states:
-            entity = entity_registry.async_get(state.entity_id)
-            if not entity:
+        device_registry = dr.async_get(self.hass)
+
+        area_id: str | None = None
+
+        if (
+            satellite_id is not None
+            and (target_entity_entry := entity_registry.async_get(satellite_id))
+            is not None
+        ):
+            area_id = target_entity_entry.area_id
+            device_id = target_entity_entry.device_id
+
+        if area_id is None:
+            if device_id is None:
+                return False
+
+            device_entry = device_registry.async_get(device_id)
+            if device_entry is None:
                return False

-            if (entity_area_id := entity.area_id) is None:
-                if (entity.device_id is None) or (
-                    (entity_device := device_registry.async_get(entity.device_id))
-                    is None
-                ):
+            area_id = device_entry.area_id
+            if area_id is None:
+                return False
+
+        for state in intent_response.matched_states:
+            target_entity_entry = entity_registry.async_get(state.entity_id)
+            if target_entity_entry is None:
+                return False
+
+            target_area_id = target_entity_entry.area_id
+            if target_area_id is None:
+                if target_entity_entry.device_id is None:
                    return False

-                entity_area_id = entity_device.area_id
+                target_device_entry = device_registry.async_get(
+                    target_entity_entry.device_id
+                )
+                if target_device_entry is None:
+                    return False

-            if entity_area_id != device.area_id:
+                target_area_id = target_device_entry.area_id
+
+            if target_area_id != area_id:
                return False

        return True
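The rewritten helper resolves the reference area in a fixed order: the satellite entity's own area, else the area of its device, else fail. That lookup chain can be sketched with plain dicts standing in for the entity and device registries (illustrative data, not the pipeline API):

    entities = {
        "assist_satellite.kitchen": {"area_id": None, "device_id": "dev-sat"},
        "light.kitchen": {"area_id": "kitchen", "device_id": None},
    }
    devices = {"dev-sat": {"area_id": "kitchen"}}


    def resolve_area(satellite_id: str | None, device_id: str | None) -> str | None:
        area_id = None
        if satellite_id and (entry := entities.get(satellite_id)):
            # Prefer the satellite entity's area; fall back to its device.
            area_id = entry["area_id"]
            device_id = entry["device_id"]
        if area_id is None:
            if device_id is None:
                return None
            device = devices.get(device_id)
            area_id = device["area_id"] if device else None
        return area_id


    area = resolve_area("assist_satellite.kitchen", None)
    targets = [entities["light.kitchen"]]
    # All targets must sit in this area for the short satellite response.
    print(all(t["area_id"] == area for t in targets))  # True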
@@ -2,9 +2,7 @@

from __future__ import annotations

-from typing import Any, TypeVar
-
-T = TypeVar("T", dict[str, Any], list[Any], None)
+from typing import Any

TRANSLATION_MAP = {
    "wan_rx": "sensor_rx_bytes",
@@ -36,7 +34,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
    return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}


-def translate_to_legacy(raw: T) -> T:
+def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
    """Translate raw data to legacy format for dicts and lists."""

    if raw is None:
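This hunk swaps a module-level `TypeVar` with value constraints for Python 3.12's inline generic syntax (PEP 695). Both spell the same constraint; a minimal before/after sketch (runnable on 3.12+):

    from typing import Any, TypeVar

    # Pre-3.12 spelling: a constrained TypeVar declared at module level.
    T = TypeVar("T", dict[str, Any], list[Any], None)


    def translate_old(raw: T) -> T:
        return raw


    # Python 3.12+ spelling: the constraint lives in the signature itself.
    def translate_new[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
        return raw


    print(translate_new({"wan_rx": 1}))  # {'wan_rx': 1}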
@@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify
+from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter

from . import util
from .agent import BackupAgent
@@ -144,7 +145,7 @@ class DownloadBackupView(HomeAssistantView):
            return Response(status=HTTPStatus.NOT_FOUND)
        else:
            stream = await agent.async_download_backup(backup_id)
-            reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
+            reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))

        worker_done_event = asyncio.Event()
@@ -152,7 +153,7 @@ class DownloadBackupView(HomeAssistantView):
            """Call by the worker thread when it's done."""
            hass.loop.call_soon_threadsafe(worker_done_event.set)

-        stream = util.AsyncIteratorWriter(hass)
+        stream = AsyncIteratorWriter(hass.loop)
        worker = threading.Thread(
            target=util.decrypt_backup,
            args=[backup, reader, stream, password, on_done, 0, []],
@@ -38,6 +38,7 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
+from homeassistant.util.async_iterator import AsyncIteratorReader

from . import util as backup_util
from .agent import (
@@ -72,7 +73,6 @@ from .models import (
)
from .store import BackupStore
from .util import (
-    AsyncIteratorReader,
    DecryptedBackupStreamer,
    EncryptedBackupStreamer,
    make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
            reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
        else:
            backup_stream = await agent.async_download_backup(backup_id)
-            reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
+            reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream))
        try:
            await self.hass.async_add_executor_job(
                validate_password_stream, reader, password
@@ -4,7 +4,6 @@ from __future__ import annotations

import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
-from concurrent.futures import CancelledError, Future
import copy
from dataclasses import dataclass, replace
from io import BytesIO
@@ -14,7 +13,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
import threading
-from typing import IO, Any, Self, cast
+from typing import IO, Any, cast

import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -23,6 +22,11 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
+from homeassistant.util.async_iterator import (
+    Abort,
+    AsyncIteratorReader,
+    AsyncIteratorWriter,
+)
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE, LOGGER
@@ -59,12 +63,6 @@ class BackupEmpty(DecryptError):
    _message = "No tar files found in the backup."


-class AbortCipher(HomeAssistantError):
-    """Abort the cipher operation."""
-
-    _message = "Abort cipher operation."
-
-
def make_backup_dir(path: Path) -> None:
    """Create a backup directory if it does not exist."""
    path.mkdir(exist_ok=True)
@@ -166,106 +164,6 @@ def validate_password(path: Path, password: str | None) -> bool:
    return False


-class AsyncIteratorReader:
-    """Wrap an AsyncIterator."""
-
-    def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
-        """Initialize the wrapper."""
-        self._aborted = False
-        self._hass = hass
-        self._stream = stream
-        self._buffer: bytes | None = None
-        self._next_future: Future[bytes | None] | None = None
-        self._pos: int = 0
-
-    async def _next(self) -> bytes | None:
-        """Get the next chunk from the iterator."""
-        return await anext(self._stream, None)
-
-    def abort(self) -> None:
-        """Abort the reader."""
-        self._aborted = True
-        if self._next_future is not None:
-            self._next_future.cancel()
-
-    def read(self, n: int = -1, /) -> bytes:
-        """Read data from the iterator."""
-        result = bytearray()
-        while n < 0 or len(result) < n:
-            if not self._buffer:
-                self._next_future = asyncio.run_coroutine_threadsafe(
-                    self._next(), self._hass.loop
-                )
-                if self._aborted:
-                    self._next_future.cancel()
-                    raise AbortCipher
-                try:
-                    self._buffer = self._next_future.result()
-                except CancelledError as err:
-                    raise AbortCipher from err
-                self._pos = 0
-            if not self._buffer:
-                # The stream is exhausted
-                break
-            chunk = self._buffer[self._pos : self._pos + n]
-            result.extend(chunk)
-            n -= len(chunk)
-            self._pos += len(chunk)
-            if self._pos == len(self._buffer):
-                self._buffer = None
-        return bytes(result)
-
-    def close(self) -> None:
-        """Close the iterator."""
-
-
-class AsyncIteratorWriter:
-    """Wrap an AsyncIterator."""
-
-    def __init__(self, hass: HomeAssistant) -> None:
-        """Initialize the wrapper."""
-        self._aborted = False
-        self._hass = hass
-        self._pos: int = 0
-        self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
-        self._write_future: Future[bytes | None] | None = None
-
-    def __aiter__(self) -> Self:
-        """Return the iterator."""
-        return self
-
-    async def __anext__(self) -> bytes:
-        """Get the next chunk from the iterator."""
-        if data := await self._queue.get():
-            return data
-        raise StopAsyncIteration
-
-    def abort(self) -> None:
-        """Abort the writer."""
-        self._aborted = True
-        if self._write_future is not None:
-            self._write_future.cancel()
-
-    def tell(self) -> int:
-        """Return the current position in the iterator."""
-        return self._pos
-
-    def write(self, s: bytes, /) -> int:
-        """Write data to the iterator."""
-        self._write_future = asyncio.run_coroutine_threadsafe(
-            self._queue.put(s), self._hass.loop
-        )
-        if self._aborted:
-            self._write_future.cancel()
-            raise AbortCipher
-        try:
-            self._write_future.result()
-        except CancelledError as err:
-            raise AbortCipher from err
-        self._pos += len(s)
-        return len(s)
-
-
def validate_password_stream(
    input_stream: IO[bytes],
    password: str | None,
@@ -342,7 +240,7 @@ def decrypt_backup(
        finally:
            # Write an empty chunk to signal the end of the stream
            output_stream.write(b"")
-    except AbortCipher:
+    except Abort:
        LOGGER.debug("Cipher operation aborted")
    finally:
        on_done(error)
@@ -430,7 +328,7 @@ def encrypt_backup(
        finally:
            # Write an empty chunk to signal the end of the stream
            output_stream.write(b"")
-    except AbortCipher:
+    except Abort:
        LOGGER.debug("Cipher operation aborted")
    finally:
        on_done(error)
@@ -557,8 +455,8 @@ class _CipherBackupStreamer:
            self._hass.loop.call_soon_threadsafe(worker_status.done.set)

        stream = await self._open_stream()
-        reader = AsyncIteratorReader(self._hass, stream)
-        writer = AsyncIteratorWriter(self._hass)
+        reader = AsyncIteratorReader(self._hass.loop, stream)
+        writer = AsyncIteratorWriter(self._hass.loop)
        worker = threading.Thread(
            target=self._cipher_func,
            args=[
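The removed `AsyncIteratorReader`/`AsyncIteratorWriter` now live in `homeassistant.util.async_iterator` and take an event loop instead of `hass`. Their core trick, bridging an async byte iterator to a blocking file-like `read()` called from a worker thread, can be sketched standalone (simplified, no abort handling; the class name is ours, not the library's):

    import asyncio
    from collections.abc import AsyncIterator


    class AsyncToSyncReader:
        """Minimal bridge: a blocking read() over an async byte iterator."""

        def __init__(
            self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]
        ) -> None:
            self._loop = loop
            self._stream = stream
            self._buffer = b""

        async def _next(self) -> bytes | None:
            # anext() with a default is not itself a coroutine, so wrap it.
            return await anext(self._stream, None)

        def read(self, n: int = -1) -> bytes:
            result = bytearray()
            while n < 0 or len(result) < n:
                if not self._buffer:
                    # Hop onto the event loop from this worker thread and block.
                    chunk = asyncio.run_coroutine_threadsafe(
                        self._next(), self._loop
                    ).result()
                    if not chunk:  # stream exhausted
                        break
                    self._buffer = chunk
                take = self._buffer if n < 0 else self._buffer[: n - len(result)]
                result.extend(take)
                self._buffer = self._buffer[len(take) :]
            return bytes(result)


    async def main() -> None:
        async def gen() -> AsyncIterator[bytes]:
            for chunk in (b"hello ", b"world"):
                yield chunk

        reader = AsyncToSyncReader(asyncio.get_running_loop(), gen())
        print(await asyncio.to_thread(reader.read))  # b'hello world'


    asyncio.run(main())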
@@ -73,11 +73,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
    # Add the websocket and API client
    entry.runtime_data = BangOlufsenData(websocket, client)

-    # Start WebSocket connection
-    await client.connect_notifications(remote_control=True, reconnect=True)
-
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

+    # Start WebSocket connection once the platforms have been loaded.
+    # This ensures that the initial WebSocket notifications are dispatched to entities
+    await client.connect_notifications(remote_control=True, reconnect=True)
+
    return True
@@ -125,7 +125,8 @@ async def async_setup_entry(
    async_add_entities(
        new_entities=[
            BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
-        ]
+        ],
+        update_before_add=True,
    )

    # Register actions.
@@ -266,34 +267,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
            self._software_status.software_version,
        )

-        # Get overall device state once. This is handled by WebSocket events the rest of the time.
-        product_state = await self._client.get_product_state()
-
-        # Get volume information.
-        if product_state.volume:
-            self._volume = product_state.volume
-
-        # Get all playback information.
-        # Ensure that the metadata is not None upon startup
-        if product_state.playback:
-            if product_state.playback.metadata:
-                self._playback_metadata = product_state.playback.metadata
-                self._remote_leader = product_state.playback.metadata.remote_leader
-            if product_state.playback.progress:
-                self._playback_progress = product_state.playback.progress
-            if product_state.playback.source:
-                self._source_change = product_state.playback.source
-            if product_state.playback.state:
-                self._playback_state = product_state.playback.state
-                # Set initial state
-                if self._playback_state.value:
-                    self._state = self._playback_state.value
-
-        self._attr_media_position_updated_at = utcnow()
-
-        # Get the highest resolution available of the given images.
-        self._media_image = get_highest_resolution_artwork(self._playback_metadata)

        # If the device has been updated with new sources, then the API will fail here.
        await self._async_update_sources()
@@ -3,16 +3,12 @@ beolink_allstandby:
    entity:
      integration: bang_olufsen
      domain: media_player
-    device:
-      integration: bang_olufsen

beolink_expand:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
-    device:
-      integration: bang_olufsen
  fields:
    all_discovered:
      required: false
@@ -37,8 +33,6 @@ beolink_join:
    entity:
      integration: bang_olufsen
      domain: media_player
-    device:
-      integration: bang_olufsen
  fields:
    jid_options:
      collapsed: false
@@ -71,16 +65,12 @@ beolink_leave:
    entity:
      integration: bang_olufsen
      domain: media_player
-    device:
-      integration: bang_olufsen

beolink_unexpand:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
-    device:
-      integration: bang_olufsen
  fields:
    jid_options:
      collapsed: false
@@ -272,6 +272,13 @@ async def async_setup_entry(
    observations: list[ConfigType] = [
        dict(subentry.data) for subentry in config_entry.subentries.values()
    ]
+
+    for observation in observations:
+        if observation[CONF_PLATFORM] == CONF_TEMPLATE:
+            observation[CONF_VALUE_TEMPLATE] = Template(
+                observation[CONF_VALUE_TEMPLATE], hass
+            )

    prior: float = config[CONF_PRIOR]
    probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
    device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
@@ -315,9 +315,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    hass.http.register_view(CalendarListView(component))
    hass.http.register_view(CalendarEventView(component))

-    frontend.async_register_built_in_panel(
-        hass, "calendar", "calendar", "hass:calendar"
-    )
+    frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar")

    websocket_api.async_register_command(hass, handle_calendar_event_create)
    websocket_api.async_register_command(hass, handle_calendar_event_delete)
@@ -51,12 +51,6 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
-from homeassistant.helpers.deprecation import (
-    DeprecatedConstantEnum,
-    all_with_deprecated_constants,
-    check_if_deprecated_constant,
-    dir_with_deprecated_constants,
-)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval
@@ -118,12 +112,6 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format"

-# These constants are deprecated as of Home Assistant 2024.10
-# Please use the StreamType enum instead.
-_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
-_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
-_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
-

class CameraEntityFeature(IntFlag):
    """Supported features of the camera entity."""
@@ -1117,11 +1105,3 @@ async def async_handle_record_service(
        duration=service_call.data[CONF_DURATION],
        lookback=service_call.data[CONF_LOOKBACK],
    )
-
-
-# These can be removed if no deprecated constant are in this module anymore
-__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
-__dir__ = partial(
-    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
-)
-__all__ = all_with_deprecated_constants(globals())
@@ -53,7 +53,6 @@ from .const import (
    CONF_ACME_SERVER,
    CONF_ALEXA,
    CONF_ALIASES,
-    CONF_CLOUDHOOK_SERVER,
    CONF_COGNITO_CLIENT_ID,
    CONF_ENTITY_CONFIG,
    CONF_FILTER,
@@ -130,7 +129,6 @@ CONFIG_SCHEMA = vol.Schema(
            vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
            vol.Optional(CONF_ACCOUNTS_SERVER): str,
            vol.Optional(CONF_ACME_SERVER): str,
-            vol.Optional(CONF_CLOUDHOOK_SERVER): str,
            vol.Optional(CONF_RELAYER_SERVER): str,
            vol.Optional(CONF_REMOTESTATE_SERVER): str,
            vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,
@@ -78,7 +78,6 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
-CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.1.1"],
+  "requirements": ["hass-nabucasa==1.2.0"],
  "single_config_entry": true
}
homeassistant/components/co2signal/quality_scale.yaml (new file, 106 lines)
@@ -0,0 +1,106 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      The integration does not provide any actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage:
    status: todo
    comment: |
      Stale docstring and test name: `test_form_home` and reusing result.
      Extract `async_setup_entry` into own fixture.
      Avoid importing `config_flow` in tests.
      Test reauth with errors
  config-flow:
    status: todo
    comment: |
      The config flow misses data descriptions.
      Remove URLs from data descriptions, they should be replaced with placeholders.
      Make use of Electricity Maps zone keys in country code as dropdown.
      Make use of location selector for coordinates.
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      The integration does not provide any actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration do not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: todo

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      The integration does not provide any actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      The integration does not provide any additional options.
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: todo
  reauthentication-flow: done
  test-coverage:
    status: todo
    comment: |
      Use `hass.config_entries.async_setup` instead of assert await `async_setup_component(hass, DOMAIN, {})`
      `test_sensor` could use `snapshot_platform`

  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: |
      This integration cannot be discovered, it is a connecting to a cloud service.
  discovery:
    status: exempt
    comment: |
      This integration cannot be discovered, it is a connecting to a cloud service.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: |
      The integration connects to a single service per configuration entry.
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: |
      This integration does not raise any repairable issues.
  stale-devices:
    status: exempt
    comment: |
      This integration connect to a single device per configuration entry.

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
@@ -25,23 +25,27 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session

DEFAULT_HOST = "192.168.1.252"
-DEFAULT_PIN = 111111
+DEFAULT_PIN = "111111"
+
+pin_regex = r"^[0-9]{4,10}$"

USER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
        vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
    }
)
-STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
+STEP_REAUTH_DATA_SCHEMA = vol.Schema(
+    {vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
+)
STEP_RECONFIGURE = vol.Schema(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
    }
)
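Switching the PIN from `cv.positive_int` to a regex-validated string preserves leading zeros and bounds the length. A standalone sketch of the same validation with plain voluptuous (the `matches_regex` helper here is a local stand-in for Home Assistant's `cv.matches_regex`, and the schema keys are illustrative):

    import re

    import voluptuous as vol

    PIN_REGEX = r"^[0-9]{4,10}$"


    def matches_regex(pattern: str):
        """Return a validator that accepts strings matching the pattern."""
        compiled = re.compile(pattern)

        def validator(value):
            if not isinstance(value, str) or not compiled.match(value):
                raise vol.Invalid(f"value does not match {pattern}")
            return value

        return validator


    SCHEMA = vol.Schema(
        {vol.Required("pin", default="111111"): matches_regex(PIN_REGEX)}
    )

    print(SCHEMA({"pin": "004321"}))  # leading zero survives: {'pin': '004321'}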
@@ -7,6 +7,6 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
-  "quality_scale": "silver",
+  "quality_scale": "platinum",
  "requirements": ["aiocomelit==0.12.3"]
}
@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the config component."""
    frontend.async_register_built_in_panel(
-        hass, "config", "config", "hass:cog", require_admin=True
+        hass, "config", "config", "mdi:cog", require_admin=True
    )

    for panel in SECTIONS:
@@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Callable
from http import HTTPStatus
+import logging
from typing import Any, NoReturn

from aiohttp import web
@@ -23,7 +24,12 @@ from homeassistant.helpers.data_entry_flow import (
    FlowManagerResourceView,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
-from homeassistant.helpers.json import json_fragment
+from homeassistant.helpers.json import (
+    JSON_DUMP,
+    find_paths_unserializable_data,
+    json_bytes,
+    json_fragment,
+)
from homeassistant.loader import (
    Integration,
    IntegrationNotFound,
@@ -31,6 +37,9 @@ from homeassistant.loader import (
    async_get_integrations,
    async_get_loaded_integration,
)
+from homeassistant.util.json import format_unserializable_data
+
+_LOGGER = logging.getLogger(__name__)


@callback
@@ -402,18 +411,40 @@ def config_entries_flow_subscribe(
    connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
        async_on_flow_init_remove
    )
-    connection.send_message(
-        websocket_api.event_message(
-            msg["id"],
-            [
-                {"type": None, "flow_id": flw["flow_id"], "flow": flw}
-                for flw in hass.config_entries.flow.async_progress()
-                if flw["context"]["source"]
-                not in (
-                    config_entries.SOURCE_RECONFIGURE,
-                    config_entries.SOURCE_USER,
-                )
-            ],
-        )
-    )
+    try:
+        serialized_flows = [
+            json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
+            for flw in hass.config_entries.flow.async_progress()
+            if flw["context"]["source"]
+            not in (
+                config_entries.SOURCE_RECONFIGURE,
+                config_entries.SOURCE_USER,
+            )
+        ]
+    except (ValueError, TypeError):
+        # If we can't serialize, we'll filter out unserializable flows
+        serialized_flows = []
+        for flw in hass.config_entries.flow.async_progress():
+            if flw["context"]["source"] in (
+                config_entries.SOURCE_RECONFIGURE,
+                config_entries.SOURCE_USER,
+            ):
+                continue
+            try:
+                serialized_flows.append(
+                    json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
+                )
+            except (ValueError, TypeError):
+                _LOGGER.error(
+                    "Unable to serialize to JSON. Bad data found at %s",
+                    format_unserializable_data(
+                        find_paths_unserializable_data(flw, dump=JSON_DUMP)
+                    ),
+                )
+                continue
+    connection.send_message(
+        websocket_api.messages.construct_event_message(
+            msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
+        )
+    )
    connection.send_result(msg["id"])
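The pattern here, serialize each item separately so one bad payload cannot sink the whole message, then splice the pre-encoded fragments into a JSON array by hand, works with any JSON encoder. A standalone sketch with the stdlib (illustrative data):

    import json
    import logging

    logging.basicConfig(level=logging.ERROR)
    _LOGGER = logging.getLogger(__name__)

    flows = [
        {"flow_id": "1", "step": "user"},
        {"flow_id": "2", "step": object()},  # not JSON-serializable
        {"flow_id": "3", "step": "discovery"},
    ]

    serialized: list[bytes] = []
    for flow in flows:
        try:
            serialized.append(json.dumps(flow).encode())
        except (ValueError, TypeError):
            _LOGGER.error("Unable to serialize flow %s, skipping", flow["flow_id"])
            continue

    # Splice the per-item fragments into one JSON array without re-encoding.
    message = b"".join((b"[", b",".join(serialized), b"]"))
    print(message)  # b'[{"flow_id": "1", ...},{"flow_id": "3", ...}]'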
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "entity",
  "quality_scale": "internal",
-  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
+  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"]
}
@@ -32,6 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
            entry,
            options={**entry.options, CONF_SOURCE: source_entity_id},
        )
+        hass.config_entries.async_schedule_reload(entry.entry_id)

    entry.async_on_unload(
        async_handle_source_entity_changes(
@@ -46,15 +47,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        )
    )
    await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
-    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
    return True


-async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Update listener, called when the config entry options are changed."""
-    await hass.config_entries.async_reload(entry.entry_id)
-
-
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))
@@ -140,6 +140,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

    config_flow = CONFIG_FLOW
    options_flow = OPTIONS_FLOW
+    options_flow_reloads = True

    VERSION = 1
    MINOR_VERSION = 4
@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol

import voluptuous as vol

-from homeassistant.const import CONF_DOMAIN
+from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
    Condition,
    ConditionCheckerType,
+    ConditionConfig,
    trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
    """Device condition."""

-    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
-        """Initialize condition."""
-        self._config = config
-        self._hass = hass
+    _hass: HomeAssistant
+    _config: ConfigType
+
+    @classmethod
+    async def async_validate_complete_config(
+        cls, hass: HomeAssistant, complete_config: ConfigType
+    ) -> ConfigType:
+        """Validate complete config."""
+        complete_config = await async_validate_device_automation_config(
+            hass,
+            complete_config,
+            cv.DEVICE_CONDITION_SCHEMA,
+            DeviceAutomationType.CONDITION,
+        )
+        # Since we don't want to migrate device conditions to a new format
+        # we just pass the entire config as options.
+        complete_config[CONF_OPTIONS] = complete_config.copy()
+        return complete_config

    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
-        """Validate device condition config."""
-        return await async_validate_device_automation_config(
-            hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
-        )
+        """Validate config.
+
+        This is here just to satisfy the abstract class interface. It is never called.
+        """
+        raise NotImplementedError
+
+    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
+        """Initialize condition."""
+        self._hass = hass
+        assert config.options is not None
+        self._config = config.options

    async def async_get_checker(self) -> condition.ConditionCheckerType:
        """Test a device condition."""
@@ -2,6 +2,7 @@

from __future__ import annotations

+import asyncio
from datetime import timedelta
from ipaddress import IPv4Address, IPv6Address
import logging
@@ -55,16 +56,16 @@ async def async_setup_entry(
    hostname = entry.data[CONF_HOSTNAME]
    name = entry.data[CONF_NAME]

-    resolver_ipv4 = entry.options[CONF_RESOLVER]
-    resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
+    nameserver_ipv4 = entry.options[CONF_RESOLVER]
+    nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
    port_ipv4 = entry.options[CONF_PORT]
    port_ipv6 = entry.options[CONF_PORT_IPV6]

    entities = []
    if entry.data[CONF_IPV4]:
-        entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4))
+        entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4))
    if entry.data[CONF_IPV6]:
-        entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6))
+        entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6))

    async_add_entities(entities, update_before_add=True)
@@ -76,11 +77,13 @@ class WanIpSensor(SensorEntity):
    _attr_translation_key = "dnsip"
    _unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"})

+    resolver: aiodns.DNSResolver
+
    def __init__(
        self,
        name: str,
        hostname: str,
-        resolver: str,
+        nameserver: str,
        ipv6: bool,
        port: int,
    ) -> None:
@@ -88,12 +91,12 @@ class WanIpSensor(SensorEntity):
        self._attr_name = "IPv6" if ipv6 else None
        self._attr_unique_id = f"{hostname}_{ipv6}"
        self.hostname = hostname
-        self.resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
-        self.resolver.nameservers = [resolver]
+        self.port = port
+        self.nameserver = nameserver
        self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
        self._retries = DEFAULT_RETRIES
        self._attr_extra_state_attributes = {
-            "resolver": resolver,
+            "resolver": nameserver,
            "querytype": self.querytype,
        }
        self._attr_device_info = DeviceInfo(
@@ -103,14 +106,26 @@ class WanIpSensor(SensorEntity):
            model=aiodns.__version__,
            name=name,
        )
+        self.create_dns_resolver()
+
+    def create_dns_resolver(self) -> None:
+        """Create the DNS resolver."""
+        self.resolver = aiodns.DNSResolver(
+            nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port
+        )

    async def async_update(self) -> None:
        """Get the current DNS IP address for hostname."""
+        if self.resolver._closed:  # noqa: SLF001
+            self.create_dns_resolver()
+        response = None
        try:
-            response = await self.resolver.query(self.hostname, self.querytype)
+            async with asyncio.timeout(10):
+                response = await self.resolver.query(self.hostname, self.querytype)
+        except TimeoutError:
+            await self.resolver.close()
        except DNSError as err:
            _LOGGER.warning("Exception while resolving host: %s", err)
            response = None

        if response:
            sorted_ips = sort_ips(
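The updated `async_update` guards the query with `asyncio.timeout` and recreates the resolver if a timeout forced it closed. That timeout-and-recover shape is generic; a runnable sketch with a dummy resolver (no aiodns required, all names illustrative):

    import asyncio


    class DummyResolver:
        """Stand-in for an async DNS resolver (illustrative only)."""

        def __init__(self, delay: float) -> None:
            self._delay = delay
            self.closed = False

        async def query(self, hostname: str) -> str:
            await asyncio.sleep(self._delay)
            return "203.0.113.7"

        async def close(self) -> None:
            self.closed = True


    async def update(resolver: DummyResolver) -> str | None:
        # Recreate the resolver if a previous timeout closed it.
        if resolver.closed:
            resolver = DummyResolver(delay=0.0)
        try:
            async with asyncio.timeout(0.1):
                return await resolver.query("example.com")
        except TimeoutError:
            await resolver.close()  # the next update will recreate it
            return None


    async def main() -> None:
        slow = DummyResolver(delay=1.0)
        print(await update(slow))  # None (timed out, resolver closed)
        print(await update(slow))  # 203.0.113.7 (fresh resolver)


    asyncio.run(main())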
@@ -116,7 +116,11 @@ class EbusdData:
        try:
            _LOGGER.debug("Opening socket to ebusd %s", name)
            command_result = ebusdpy.write(self._address, self._circuit, name, value)
-            if command_result is not None and "done" not in command_result:
+            if (
+                command_result is not None
+                and "done" not in command_result
+                and "empty" not in command_result
+            ):
                _LOGGER.warning("Write command failed: %s", name)
        except RuntimeError as err:
            _LOGGER.error(err)
@@ -176,7 +176,7 @@
      "description": "Sets the participating sensors for a climate program.",
      "fields": {
        "preset_mode": {
-          "name": "Climate Name",
+          "name": "Climate program",
          "description": "Name of the climate program to set the sensors active on.\nDefaults to currently active program."
        },
        "device_ids": {
@@ -188,7 +188,7 @@
    },
    "exceptions": {
      "invalid_preset": {
-        "message": "Invalid climate name, available options are: {options}"
+        "message": "Invalid climate program, available options are: {options}"
      },
      "invalid_sensor": {
        "message": "Invalid sensor for thermostat, available options are: {options}"
@@ -69,7 +69,9 @@ class EcovacsMap(
        await super().async_added_to_hass()

        async def on_info(event: CachedMapInfoEvent) -> None:
-            self._attr_extra_state_attributes["map_name"] = event.name
+            for map_obj in event.maps:
+                if map_obj.using:
+                    self._attr_extra_state_attributes["map_name"] = map_obj.name

        async def on_changed(event: MapChangedEvent) -> None:
            self._attr_image_last_updated = event.when
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ecovacs",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.11", "deebot-client==14.0.0"]
+  "requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
}
@@ -2,3 +2,4 @@ raw_get_positions:
  target:
    entity:
      domain: vacuum
+      integration: ecovacs
@@ -6,5 +6,5 @@
  "dependencies": ["webhook"],
  "documentation": "https://www.home-assistant.io/integrations/ecowitt",
  "iot_class": "local_push",
-  "requirements": ["aioecowitt==2025.9.1"]
+  "requirements": ["aioecowitt==2025.9.2"]
}
@@ -3,14 +3,15 @@
from __future__ import annotations

from datetime import timedelta
+from enum import IntEnum
import logging
from typing import Any

from pyephember2.pyephember2 import (
    EphEmber,
    ZoneMode,
+    boiler_state,
    zone_current_temperature,
-    zone_is_active,
    zone_is_hotwater,
    zone_mode,
    zone_name,
@@ -53,6 +54,15 @@ EPH_TO_HA_STATE = {
    "OFF": HVACMode.OFF,
}


+class EPHBoilerStates(IntEnum):
+    """Boiler states for a zone given by the api."""
+
+    FIXME = 0
+    OFF = 1
+    ON = 2
+
+
HA_STATE_TO_EPH = {value: key for key, value in EPH_TO_HA_STATE.items()}

@@ -123,7 +133,7 @@ class EphEmberThermostat(ClimateEntity):
    @property
    def hvac_action(self) -> HVACAction:
        """Return current HVAC action."""
-        if zone_is_active(self._zone):
+        if boiler_state(self._zone) == EPHBoilerStates.ON:
            return HVACAction.HEATING

        return HVACAction.IDLE
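`IntEnum` lets the integration compare the raw integer the API returns directly against named states, which is what the new `hvac_action` does. A self-contained sketch (names and values are illustrative, not the pyephember2 API):

    from enum import IntEnum


    class BoilerState(IntEnum):
        """Named values for the raw integers an API might return."""

        UNKNOWN = 0
        OFF = 1
        ON = 2


    def hvac_action(raw_boiler_state: int) -> str:
        # IntEnum members compare equal to plain ints, so no conversion is needed.
        if raw_boiler_state == BoilerState.ON:
            return "heating"
        return "idle"


    print(hvac_action(2))  # heating
    print(hvac_action(1))  # idle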
homeassistant/components/esphome/analytics.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""Analytics platform."""

from homeassistant.components.analytics import AnalyticsInput, AnalyticsModifications
from homeassistant.core import HomeAssistant


async def async_modify_analytics(
    hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
    """Modify the analytics."""
    return AnalyticsModifications(remove=True)
@@ -57,6 +57,7 @@ from .manager import async_replace_device

ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
ERROR_INVALID_ENCRYPTION_KEY = "invalid_psk"
+ERROR_INVALID_PASSWORD_AUTH = "invalid_auth"
_LOGGER = logging.getLogger(__name__)

ZERO_NOISE_PSK = "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA="
@@ -137,6 +138,11 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
            self._password = ""
            return await self._async_authenticate_or_add()

+        if error == ERROR_INVALID_PASSWORD_AUTH or (
+            error is None and self._device_info and self._device_info.uses_password
+        ):
+            return await self.async_step_authenticate()
+
        if error is None and entry_data.get(CONF_NOISE_PSK):
            # Device was configured with encryption but now connects without it.
            # Check if it's the same device before offering to remove encryption.
@@ -690,13 +696,15 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
        cli = APIClient(
            host,
            port or DEFAULT_PORT,
-            "",
+            self._password or "",
            zeroconf_instance=zeroconf_instance,
            noise_psk=noise_psk,
        )
        try:
            await cli.connect()
            self._device_info = await cli.device_info()
+        except InvalidAuthAPIError:
+            return ERROR_INVALID_PASSWORD_AUTH
        except RequiresEncryptionAPIError:
            return ERROR_REQUIRES_ENCRYPTION_KEY
        except InvalidEncryptionKeyAPIError as ex:
@@ -372,6 +372,9 @@ class ESPHomeManager:
        """Subscribe to states and list entities on successful API login."""
        try:
            await self._on_connect()
+        except InvalidAuthAPIError as err:
+            _LOGGER.warning("Authentication failed for %s: %s", self.host, err)
+            await self._start_reauth_and_disconnect()
        except APIConnectionError as err:
            _LOGGER.warning(
                "Error getting setting up connection for %s: %s", self.host, err
@@ -641,7 +644,14 @@ class ESPHomeManager:
            if self.reconnect_logic:
                await self.reconnect_logic.stop()
            return
+        await self._start_reauth_and_disconnect()
+
+    async def _start_reauth_and_disconnect(self) -> None:
+        """Start reauth flow and stop reconnection attempts."""
        self.entry.async_start_reauth(self.hass)
        await self.cli.disconnect()
+        if self.reconnect_logic:
+            await self.reconnect_logic.stop()

    async def _handle_dynamic_encryption_key(
        self, device_info: EsphomeDeviceInfo
@@ -1063,7 +1073,7 @@ def _async_register_service(
        service_name,
        {
            "description": (
-                f"Calls the service {service.name} of the node {device_info.name}"
+                f"Performs the action {service.name} of the node {device_info.name}"
            ),
            "fields": fields,
        },
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
-    "aioesphomeapi==41.9.0",
+    "aioesphomeapi==41.11.0",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.3.0"
  ],
@@ -26,11 +26,14 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
        super().__init__(coordinator)
        self._serial = serial
        self._camera_name = self.data["name"]
+
+        connections = set()
+        if mac_address := self.data["mac_address"]:
+            connections.add((CONNECTION_NETWORK_MAC, mac_address))
+
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial)},
-            connections={
-                (CONNECTION_NETWORK_MAC, self.data["mac_address"]),
-            },
+            connections=connections,
            manufacturer=MANUFACTURER,
            model=self.data["device_sub_category"],
            name=self.data["name"],
@@ -62,11 +65,14 @@ class EzvizBaseEntity(Entity):
        self._serial = serial
        self.coordinator = coordinator
        self._camera_name = self.data["name"]
+
+        connections = set()
+        if mac_address := self.data["mac_address"]:
+            connections.add((CONNECTION_NETWORK_MAC, mac_address))
+
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial)},
-            connections={
-                (CONNECTION_NETWORK_MAC, self.data["mac_address"]),
-            },
+            connections=connections,
            manufacturer=MANUFACTURER,
            model=self.data["device_sub_category"],
            name=self.data["name"],
@@ -10,7 +10,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Filter from a config entry."""

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
-    entry.async_on_unload(entry.add_update_listener(update_listener))

    return True

@@ -18,8 +17,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload Filter config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-
-
-async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Handle options update."""
-    await hass.config_entries.async_reload(entry.entry_id)
@@ -246,6 +246,7 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

    config_flow = CONFIG_FLOW
    options_flow = OPTIONS_FLOW
+    options_flow_reloads = True

    def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
        """Return config entry title."""
homeassistant/components/firefly_iii/__init__.py (new file, 27 lines)
@@ -0,0 +1,27 @@
"""The Firefly III integration."""

from __future__ import annotations

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator

_PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
    """Set up Firefly III from a config entry."""

    coordinator = FireflyDataUpdateCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
homeassistant/components/firefly_iii/config_flow.py (new file, 97 lines)
@@ -0,0 +1,97 @@
"""Config flow for the Firefly III integration."""

from __future__ import annotations

import logging
from typing import Any

from pyfirefly import (
    Firefly,
    FireflyAuthenticationError,
    FireflyConnectionError,
    FireflyTimeoutError,
)
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_URL): str,
        vol.Optional(CONF_VERIFY_SSL, default=True): bool,
        vol.Required(CONF_API_KEY): str,
    }
)


async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
    """Validate the user input allows us to connect."""

    try:
        client = Firefly(
            api_url=data[CONF_URL],
            api_key=data[CONF_API_KEY],
            session=async_get_clientsession(hass),
        )
        await client.get_about()
    except FireflyAuthenticationError:
        raise InvalidAuth from None
    except FireflyConnectionError as err:
        raise CannotConnect from err
    except FireflyTimeoutError as err:
        raise FireflyClientTimeout from err

    return True


class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Firefly III."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
            try:
                await _validate_input(self.hass, user_input)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except FireflyClientTimeout:
                errors["base"] = "timeout_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_create_entry(
                    title=user_input[CONF_URL], data=user_input
                )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )


class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect."""


class InvalidAuth(HomeAssistantError):
    """Error to indicate there is invalid auth."""


class FireflyClientTimeout(HomeAssistantError):
    """Error to indicate a timeout occurred."""
homeassistant/components/firefly_iii/const.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""Constants for the Firefly III integration."""

DOMAIN = "firefly_iii"

MANUFACTURER = "Firefly III"
NAME = "Firefly III"
137
homeassistant/components/firefly_iii/coordinator.py
Normal file
137
homeassistant/components/firefly_iii/coordinator.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""Data Update Coordinator for Firefly III integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyfirefly import (
|
||||
Firefly,
|
||||
    FireflyAuthenticationError,
    FireflyConnectionError,
    FireflyTimeoutError,
)
from pyfirefly.models import Account, Bill, Budget, Category, Currency

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type FireflyConfigEntry = ConfigEntry[FireflyDataUpdateCoordinator]

DEFAULT_SCAN_INTERVAL = timedelta(minutes=5)


@dataclass
class FireflyCoordinatorData:
    """Data structure for Firefly III coordinator data."""

    accounts: list[Account]
    categories: list[Category]
    category_details: list[Category]
    budgets: list[Budget]
    bills: list[Bill]
    primary_currency: Currency


class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]):
    """Coordinator to manage data updates for Firefly III integration."""

    config_entry: FireflyConfigEntry

    def __init__(self, hass: HomeAssistant, config_entry: FireflyConfigEntry) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=DEFAULT_SCAN_INTERVAL,
        )
        self.firefly = Firefly(
            api_url=self.config_entry.data[CONF_URL],
            api_key=self.config_entry.data[CONF_API_KEY],
            session=async_create_clientsession(
                self.hass,
                self.config_entry.data[CONF_VERIFY_SSL],
                cookie_jar=CookieJar(unsafe=True),
            ),
        )

    async def _async_setup(self) -> None:
        """Set up the coordinator."""
        try:
            await self.firefly.get_about()
        except FireflyAuthenticationError as err:
            raise ConfigEntryError(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
            ) from err
        except FireflyConnectionError as err:
            raise ConfigEntryNotReady(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except FireflyTimeoutError as err:
            raise ConfigEntryNotReady(
                translation_domain=DOMAIN,
                translation_key="timeout_connect",
                translation_placeholders={"error": repr(err)},
            ) from err

    async def _async_update_data(self) -> FireflyCoordinatorData:
        """Fetch data from Firefly III API."""
        now = datetime.now()
        start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
        end_date = now

        try:
            accounts = await self.firefly.get_accounts()
            categories = await self.firefly.get_categories()
            category_details = [
                await self.firefly.get_category(
                    category_id=int(category.id), start=start_date, end=end_date
                )
                for category in categories
            ]
            primary_currency = await self.firefly.get_currency_primary()
            budgets = await self.firefly.get_budgets()
            bills = await self.firefly.get_bills()
        except FireflyAuthenticationError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
            ) from err
        except FireflyConnectionError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except FireflyTimeoutError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="timeout_connect",
                translation_placeholders={"error": repr(err)},
            ) from err

        return FireflyCoordinatorData(
            accounts=accounts,
            categories=categories,
            category_details=category_details,
            budgets=budgets,
            bills=bills,
            primary_currency=primary_currency,
        )
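The integration's __init__.py is not part of the diff shown here; as a rough sketch of how this coordinator is typically wired into entry.runtime_data (the standard Home Assistant setup shape, using only names from the files above — the module itself is assumed):

# Hypothetical sketch of homeassistant/components/firefly_iii/__init__.py
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator

_PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
    """Set up Firefly III from a config entry."""
    coordinator = FireflyDataUpdateCoordinator(hass, entry)
    # Runs _async_setup() plus the first refresh; auth and connection
    # failures surface as ConfigEntryError / ConfigEntryNotReady above.
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
    """Unload a Firefly III config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)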
homeassistant/components/firefly_iii/entity.py
@@ -0,0 +1,40 @@
"""Base entity for Firefly III integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import FireflyDataUpdateCoordinator
|
||||
|
||||
|
||||
class FireflyBaseEntity(CoordinatorEntity[FireflyDataUpdateCoordinator]):
|
||||
"""Base class for Firefly III entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: FireflyDataUpdateCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize a Firefly entity."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self.entity_description = entity_description
|
||||
self._attr_device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
manufacturer=MANUFACTURER,
|
||||
configuration_url=URL(coordinator.config_entry.data[CONF_URL]),
|
||||
identifiers={
|
||||
(
|
||||
DOMAIN,
|
||||
f"{coordinator.config_entry.entry_id}_{self.entity_description.key}",
|
||||
)
|
||||
},
|
||||
)
|
homeassistant/components/firefly_iii/icons.json
@@ -0,0 +1,18 @@
{
  "entity": {
    "sensor": {
      "account_type": {
        "default": "mdi:bank",
        "state": {
          "expense": "mdi:cash-minus",
          "revenue": "mdi:cash-plus",
          "asset": "mdi:account-cash",
          "liability": "mdi:hand-coin"
        }
      },
      "category": {
        "default": "mdi:label"
      }
    }
  }
}
homeassistant/components/firefly_iii/manifest.json
@@ -0,0 +1,10 @@
{
  "domain": "firefly_iii",
  "name": "Firefly III",
  "codeowners": ["@erwindouna"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/firefly_iii",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyfirefly==0.1.6"]
}
homeassistant/components/firefly_iii/quality_scale.yaml
@@ -0,0 +1,68 @@
rules:
  # Bronze
  action-setup: done
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions: done
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates:
    status: exempt
    comment: |
      No explicit parallel updates are defined.
  reauthentication-flow:
    status: todo
    comment: |
      No reauthentication flow is defined yet; it will be added in a later iteration.
  test-coverage: done

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info: todo
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: todo
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
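The reauthentication-flow entry above is still a todo; a minimal sketch of how such a step is commonly added to a Home Assistant config flow (async_step_reauth / async_step_reauth_confirm are the standard ConfigFlow hooks, while _async_validate_key is an assumed helper, not part of this diff):

# Hypothetical reauth steps for the Firefly III config flow (not in this diff).
from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY

from .const import DOMAIN


class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
    """Sketch: only the reauth-related steps are shown."""

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Start a reauth flow after an authentication failure."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Ask for a fresh API key and update the existing entry."""
        errors: dict[str, str] = {}
        if user_input is not None:
            # _async_validate_key is an assumed helper that would call
            # Firefly.get_about() with the new token, as _async_setup does.
            if await self._async_validate_key(user_input[CONF_API_KEY]):
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), data_updates=user_input
                )
            errors["base"] = "invalid_auth"
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            errors=errors,
        )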
homeassistant/components/firefly_iii/sensor.py
@@ -0,0 +1,142 @@
"""Sensor platform for Firefly III integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyfirefly.models import Account, Category
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.components.sensor.const import SensorDeviceClass
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
|
||||
from .entity import FireflyBaseEntity
|
||||
|
||||
ACCOUNT_SENSORS: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="account_type",
|
||||
translation_key="account",
|
||||
device_class=SensorDeviceClass.MONETARY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
),
|
||||
)
|
||||
|
||||
CATEGORY_SENSORS: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="category",
|
||||
translation_key="category",
|
||||
device_class=SensorDeviceClass.MONETARY,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FireflyConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Firefly III sensor platform."""
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[SensorEntity] = [
|
||||
FireflyAccountEntity(
|
||||
coordinator=coordinator,
|
||||
entity_description=description,
|
||||
account=account,
|
||||
)
|
||||
for account in coordinator.data.accounts
|
||||
for description in ACCOUNT_SENSORS
|
||||
]
|
||||
|
||||
entities.extend(
|
||||
FireflyCategoryEntity(
|
||||
coordinator=coordinator,
|
||||
entity_description=description,
|
||||
category=category,
|
||||
)
|
||||
for category in coordinator.data.category_details
|
||||
for description in CATEGORY_SENSORS
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
|
||||
"""Entity for Firefly III account."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: FireflyDataUpdateCoordinator,
|
||||
entity_description: SensorEntityDescription,
|
||||
account: Account,
|
||||
) -> None:
|
||||
"""Initialize Firefly account entity."""
|
||||
super().__init__(coordinator, entity_description)
|
||||
self._account = account
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{account.id}"
|
||||
self._attr_name = account.attributes.name
|
||||
self._attr_native_unit_of_measurement = (
|
||||
coordinator.data.primary_currency.attributes.code
|
||||
)
|
||||
|
||||
# Account type state doesn't go well with the icons.json. Need to fix it.
|
||||
if account.attributes.type == "expense":
|
||||
self._attr_icon = "mdi:cash-minus"
|
||||
elif account.attributes.type == "asset":
|
||||
self._attr_icon = "mdi:account-cash"
|
||||
elif account.attributes.type == "revenue":
|
||||
self._attr_icon = "mdi:cash-plus"
|
||||
elif account.attributes.type == "liability":
|
||||
self._attr_icon = "mdi:hand-coin"
|
||||
else:
|
||||
self._attr_icon = "mdi:bank"
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | None:
|
||||
"""Return the state of the sensor."""
|
||||
return self._account.attributes.current_balance
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, str] | None:
|
||||
"""Return extra state attributes for the account entity."""
|
||||
return {
|
||||
"account_role": self._account.attributes.account_role or "",
|
||||
"account_type": self._account.attributes.type or "",
|
||||
"current_balance": str(self._account.attributes.current_balance or ""),
|
||||
}
|
||||
|
||||
|
||||
class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
|
||||
"""Entity for Firefly III category."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: FireflyDataUpdateCoordinator,
|
||||
entity_description: SensorEntityDescription,
|
||||
category: Category,
|
||||
) -> None:
|
||||
"""Initialize Firefly category entity."""
|
||||
super().__init__(coordinator, entity_description)
|
||||
self._category = category
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{category.id}"
|
||||
self._attr_name = category.attributes.name
|
||||
self._attr_native_unit_of_measurement = (
|
||||
coordinator.data.primary_currency.attributes.code
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
spent_items = self._category.attributes.spent or []
|
||||
earned_items = self._category.attributes.earned or []
|
||||
|
||||
spent = sum(float(item.sum) for item in spent_items if item.sum is not None)
|
||||
earned = sum(float(item.sum) for item in earned_items if item.sum is not None)
|
||||
|
||||
if spent == 0 and earned == 0:
|
||||
return None
|
||||
return spent + earned
|
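The if/elif icon chain in FireflyAccountEntity.__init__ above could also be expressed as a module-level mapping with a default; a small equivalent refactor sketch (same icons, same fallback, ACCOUNT_TYPE_ICONS is an assumed name):

# Equivalent sketch of the icon selection above (not part of the diff).
ACCOUNT_TYPE_ICONS: dict[str, str] = {
    "expense": "mdi:cash-minus",
    "asset": "mdi:account-cash",
    "revenue": "mdi:cash-plus",
    "liability": "mdi:hand-coin",
}

# Inside FireflyAccountEntity.__init__, replacing the if/elif chain;
# .get() falls back to the bank icon for unknown or missing types.
#     self._attr_icon = ACCOUNT_TYPE_ICONS.get(
#         account.attributes.type, "mdi:bank"
#     )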
homeassistant/components/firefly_iii/strings.json
@@ -0,0 +1,39 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "url": "[%key:common::config_flow::data::url%]",
          "api_key": "[%key:common::config_flow::data::api_key%]",
          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
        },
        "data_description": {
          "url": "[%key:common::config_flow::data::url%]",
          "api_key": "The API key for authenticating with Firefly",
          "verify_ssl": "Verify the SSL certificate of the Firefly instance"
        },
        "description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "An error occurred while trying to connect to the Firefly instance: {error}"
    },
    "invalid_auth": {
      "message": "An error occurred while trying to authenticate: {error}"
    },
    "timeout_connect": {
      "message": "A timeout occurred while trying to connect to the Firefly instance: {error}"
    }
  }
}
@@ -46,6 +46,9 @@ async def async_get_config_entry_diagnostics(
             }
             for _, device in avm_wrapper.devices.items()
         ],
+        "cpu_temperatures": await hass.async_add_executor_job(
+            avm_wrapper.fritz_status.get_cpu_temperatures
+        ),
         "wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
     },
 }
@@ -459,7 +459,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         "developer-tools",
         require_admin=True,
         sidebar_title="developer_tools",
-        sidebar_icon="hass:hammer",
+        sidebar_icon="mdi:hammer",
     )


 @callback
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250924.0"]
+  "requirements": ["home-assistant-frontend==20251001.0"]
 }
@@ -1,8 +1,10 @@
 load_url:
-  target:
-    device:
-      integration: fully_kiosk
   fields:
+    device_id:
+      required: true
+      selector:
+        device:
+          integration: fully_kiosk
     url:
       example: "https://home-assistant.io"
       required: true
@@ -10,10 +12,12 @@ load_url:
       text:

 set_config:
-  target:
-    device:
-      integration: fully_kiosk
   fields:
+    device_id:
+      required: true
+      selector:
+        device:
+          integration: fully_kiosk
     key:
       example: "motionSensitivity"
       required: true
@@ -26,12 +30,14 @@ set_config:
       text:

 start_application:
-  target:
-    device:
-      integration: fully_kiosk
   fields:
     application:
       example: "de.ozerov.fully"
       required: true
       selector:
         text:
+    device_id:
+      required: true
+      selector:
+        device:
+          integration: fully_kiosk
@@ -147,6 +147,10 @@
       "name": "Load URL",
       "description": "Loads a URL on Fully Kiosk Browser.",
       "fields": {
+        "device_id": {
+          "name": "Device ID",
+          "description": "The target device for this action."
+        },
         "url": {
           "name": "[%key:common::config_flow::data::url%]",
           "description": "URL to load."
@@ -157,6 +161,10 @@
       "name": "Set configuration",
       "description": "Sets a configuration parameter on Fully Kiosk Browser.",
       "fields": {
+        "device_id": {
+          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
+          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
+        },
         "key": {
           "name": "Key",
           "description": "Configuration parameter to set."
@@ -174,6 +182,10 @@
         "application": {
           "name": "Application",
           "description": "Package name of the application to start."
         },
+        "device_id": {
+          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
+          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
+        }
       }
     }
   }
@@ -108,6 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
         )
+        hass.config_entries.async_schedule_reload(entry.entry_id)

     entry.async_on_unload(
         # We use async_handle_source_entity_changes to track changes to the humidifer,
@@ -140,6 +141,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_SENSOR: data["entity_id"]},
         )
+        hass.config_entries.async_schedule_reload(entry.entry_id)

     entry.async_on_unload(
         async_track_entity_registry_updated_event(
@@ -148,7 +150,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )

     await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
-    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True


@@ -186,11 +187,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
     return True


-async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Update listener, called when the config entry options are changed."""
-    await hass.config_entries.async_reload(entry.entry_id)
-
-
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(
@@ -96,6 +96,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
+    options_flow_reloads = True

     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
         """Return config entry title."""
@@ -35,6 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_HEATER: source_entity_id},
         )
+        hass.config_entries.async_schedule_reload(entry.entry_id)

     entry.async_on_unload(
         # We use async_handle_source_entity_changes to track changes to the heater, but
@@ -67,6 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_SENSOR: data["entity_id"]},
         )
+        hass.config_entries.async_schedule_reload(entry.entry_id)

     entry.async_on_unload(
         async_track_entity_registry_updated_event(
@@ -75,7 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
-    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True


@@ -113,11 +114,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
     return True


-async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Update listener, called when the config entry options are changed."""
-    await hass.config_entries.async_reload(entry.entry_id)
-
-
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -104,6 +104,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
+    options_flow_reloads = True

     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
         """Return config entry title."""
@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):

     async def async_update(self) -> None:
         """Update an entity's state data."""
-        if "_state" in self._device.data:  # only via v3 API
-            self._last_comms = dt_util.utc_from_timestamp(
-                self._device.data["_state"]["lastComms"]
-            )
+        if (state := self._device.data.get("_state")) and (
+            last_comms := state.get("lastComms")
+        ) is not None:  # only via v3 API
+            self._last_comms = dt_util.utc_from_timestamp(last_comms)


 class GeniusZone(GeniusEntity):
@@ -1,7 +1,5 @@
 set_vacation:
   target:
-    device:
-      integration: google_mail
     entity:
       integration: google_mail
   fields:
@@ -141,15 +141,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     await hass.config_entries.async_forward_entry_setups(
         entry, (entry.options["group_type"],)
     )
-    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True


-async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Update listener, called when the config entry options are changed."""
-    await hass.config_entries.async_reload(entry.entry_id)
-
-
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(
@@ -329,6 +329,7 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
+    options_flow_reloads = True

     @callback
     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
@@ -1,5 +1,7 @@
 """Coordinator module for managing Growatt data fetching."""

+from __future__ import annotations
+
 import datetime
 import json
 import logging
@@ -145,7 +147,7 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         return self.data.get("currency")

     def get_data(
-        self, entity_description: "GrowattSensorEntityDescription"
+        self, entity_description: GrowattSensorEntityDescription
     ) -> str | int | float | None:
         """Get the data."""
         variable = entity_description.api_key
@@ -4,9 +4,14 @@ from uuid import UUID

 from habiticalib import Habitica

+from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
 from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers import config_validation as cv, device_registry as dr
+from homeassistant.helpers import (
+    config_validation as cv,
+    device_registry as dr,
+    entity_registry as er,
+)
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.hass_dict import HassKey
@@ -27,6 +32,7 @@ PLATFORMS = [
     Platform.BUTTON,
     Platform.CALENDAR,
     Platform.IMAGE,
+    Platform.NOTIFY,
     Platform.SENSOR,
     Platform.SWITCH,
     Platform.TODO,
@@ -46,6 +52,7 @@ async def async_setup_entry(
     """Set up habitica from a config entry."""
     party_added_by_this_entry: UUID | None = None
     device_reg = dr.async_get(hass)
+    entity_registry = er.async_get(hass)

     session = async_get_clientsession(
         hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
@@ -96,6 +103,15 @@ async def async_setup_entry(
                 device.id, remove_config_entry_id=config_entry.entry_id
             )

+        notify_entities = [
+            entry.entity_id
+            for entry in entity_registry.entities.values()
+            if entry.domain == NOTIFY_DOMAIN
+            and entry.config_entry_id == config_entry.entry_id
+        ]
+        for entity_id in notify_entities:
+            entity_registry.async_remove(entity_id)
+
         hass.config_entries.async_schedule_reload(config_entry.entry_id)

     coordinator.async_add_listener(_party_update_listener)
@@ -121,4 +121,4 @@ class HabiticaPartyBinarySensorEntity(HabiticaPartyBase, BinarySensorEntity):
     @property
     def is_on(self) -> bool | None:
         """If the binary sensor is on."""
-        return self.coordinator.data.quest.active
+        return self.coordinator.data.party.quest.active
@@ -9,6 +9,7 @@ from datetime import timedelta
 from io import BytesIO
 import logging
 from typing import Any
+from uuid import UUID

 from aiohttp import ClientError
 from habiticalib import (
@@ -48,6 +49,14 @@ class HabiticaData:
     tasks: list[TaskData]


+@dataclass
+class HabiticaPartyData:
+    """Habitica party data."""
+
+    party: GroupData
+    members: dict[UUID, UserData]
+
+
 type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator]


@@ -192,11 +201,19 @@ class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
         return png.getvalue()


-class HabiticaPartyCoordinator(HabiticaBaseCoordinator[GroupData]):
+class HabiticaPartyCoordinator(HabiticaBaseCoordinator[HabiticaPartyData]):
     """Habitica Party Coordinator."""

     _update_interval = timedelta(minutes=15)

-    async def _update_data(self) -> GroupData:
+    async def _update_data(self) -> HabiticaPartyData:
         """Fetch the latest party data."""
-        return (await self.habitica.get_group()).data
+        return HabiticaPartyData(
+            party=(await self.habitica.get_group()).data,
+            members={
+                member.id: member
+                for member in (await self.habitica.get_group_members()).data
+                if member.id
+            },
+        )
@@ -68,14 +68,14 @@ class HabiticaPartyBase(CoordinatorEntity[HabiticaPartyCoordinator]):
         super().__init__(coordinator)
         if TYPE_CHECKING:
             assert config_entry.unique_id
-        unique_id = f"{config_entry.unique_id}_{coordinator.data.id!s}"
+        unique_id = f"{config_entry.unique_id}_{coordinator.data.party.id!s}"
         self.entity_description = entity_description
         self._attr_unique_id = f"{unique_id}_{entity_description.key}"
         self._attr_device_info = DeviceInfo(
             entry_type=DeviceEntryType.SERVICE,
             manufacturer=MANUFACTURER,
             model=NAME,
-            name=coordinator.data.summary,
+            name=coordinator.data.party.summary,
             identifiers={(DOMAIN, unique_id)},
             via_device=(DOMAIN, config_entry.unique_id),
         )
@@ -194,6 +194,11 @@
       "quest_running": {
         "default": "mdi:script-text-play"
       }
     },
+    "notify": {
+      "party_chat": {
+        "default": "mdi:forum"
+      }
+    }
   },
   "services": {