mirror of
https://github.com/home-assistant/core.git
synced 2025-11-25 10:37:59 +00:00
Compare commits
306 Commits
add_trigge
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
15328a4aff | ||
|
|
083cfb89af | ||
|
|
bd129c2085 | ||
|
|
f73bc9242b | ||
|
|
4506be5065 | ||
|
|
80c611e562 | ||
|
|
b44aafc294 | ||
|
|
af1e3205b8 | ||
|
|
1360fe7f23 | ||
|
|
b5bb8583f8 | ||
|
|
9b62c212ce | ||
|
|
8fa56ad92e | ||
|
|
f82f0a1862 | ||
|
|
878881b100 | ||
|
|
743583d9bd | ||
|
|
f537204d22 | ||
|
|
ec74be7922 | ||
|
|
3574f647d0 | ||
|
|
6c4296a0de | ||
|
|
e780e3db8c | ||
|
|
4ed2efa4e8 | ||
|
|
abef6f7b3e | ||
|
|
5556fb99e6 | ||
|
|
16669e39bd | ||
|
|
ca088d81c3 | ||
|
|
12847fb0a4 | ||
|
|
8b758c46f4 | ||
|
|
f439471dc1 | ||
|
|
5ff3233b09 | ||
|
|
22daed083f | ||
|
|
13384de464 | ||
|
|
f5e5183190 | ||
|
|
e18668b8f9 | ||
|
|
15647f2720 | ||
|
|
c961126ee5 | ||
|
|
5142c5f418 | ||
|
|
3d459704e1 | ||
|
|
5a8ddcd0b3 | ||
|
|
2667a40b92 | ||
|
|
08baa99691 | ||
|
|
d84cf26f40 | ||
|
|
ba5472da90 | ||
|
|
e20b88a54f | ||
|
|
ac69712a51 | ||
|
|
f0e75ba0ed | ||
|
|
e64598e7f5 | ||
|
|
e6f9a8e7d6 | ||
|
|
1e8b42f843 | ||
|
|
430eee0b28 | ||
|
|
b4799aa7ea | ||
|
|
ab45460069 | ||
|
|
c8fd6db3ff | ||
|
|
0a9f200ca4 | ||
|
|
8591335660 | ||
|
|
c01089e994 | ||
|
|
79a7daf89d | ||
|
|
d22867b852 | ||
|
|
ddb74c5af4 | ||
|
|
9aec7b12c2 | ||
|
|
bf42e3769a | ||
|
|
43f40c6f0e | ||
|
|
03ac634e6d | ||
|
|
a204e85d84 | ||
|
|
79c7ad7646 | ||
|
|
704d4c896d | ||
|
|
5b6a4b0fea | ||
|
|
ef5573c693 | ||
|
|
45aecd525a | ||
|
|
ce1146492e | ||
|
|
1ce890b105 | ||
|
|
3e7bef77e5 | ||
|
|
1222828852 | ||
|
|
1ef64582eb | ||
|
|
d363bd63eb | ||
|
|
5916af1115 | ||
|
|
f8bf7ec1ff | ||
|
|
41e42b9581 | ||
|
|
51f68f2776 | ||
|
|
773cb7424c | ||
|
|
eefab75ef0 | ||
|
|
81b4122b73 | ||
|
|
bd0ab4d1fe | ||
|
|
80151b205d | ||
|
|
4488fdd2d6 | ||
|
|
a6e0bea805 | ||
|
|
994619e179 | ||
|
|
4db5be73a7 | ||
|
|
3cfedd1721 | ||
|
|
2f1301abaf | ||
|
|
21d61ef401 | ||
|
|
6850f9622a | ||
|
|
2b2bb79505 | ||
|
|
d97998e2e1 | ||
|
|
3ef62c97ca | ||
|
|
5cca95ab2f | ||
|
|
a4f0a21c8e | ||
|
|
11a2b5df6a | ||
|
|
07e2c8a610 | ||
|
|
43783ed896 | ||
|
|
a206604df5 | ||
|
|
2e82ac81b2 | ||
|
|
5139e9e566 | ||
|
|
c53674531c | ||
|
|
a04244ad25 | ||
|
|
b27b357b91 | ||
|
|
01e38853c0 | ||
|
|
06158fc9a1 | ||
|
|
e5968084a2 | ||
|
|
263839a6c0 | ||
|
|
931b2c2db0 | ||
|
|
8e26112db1 | ||
|
|
b1286af423 | ||
|
|
bd02e279cf | ||
|
|
6e5be843d6 | ||
|
|
5b1d86a04b | ||
|
|
1514013c3b | ||
|
|
54ed290cc1 | ||
|
|
1106f4f0e2 | ||
|
|
f73e92a34a | ||
|
|
74ad5066e2 | ||
|
|
4202a665af | ||
|
|
c9ddbe39ce | ||
|
|
8a2e8d2c61 | ||
|
|
ca2e8bfb56 | ||
|
|
c0772f3957 | ||
|
|
0b96da3b24 | ||
|
|
4c07b2b290 | ||
|
|
f699d95ea0 | ||
|
|
f6b9a0eb29 | ||
|
|
71c665ed49 | ||
|
|
85a1afb174 | ||
|
|
9668a68c28 | ||
|
|
a06aa8edfe | ||
|
|
4e30a5d930 | ||
|
|
696550a7f2 | ||
|
|
c064d23a99 | ||
|
|
ac7b063c2c | ||
|
|
e0778c8e2e | ||
|
|
2ba5a96d5b | ||
|
|
13c9fb6e37 | ||
|
|
102bb1f694 | ||
|
|
fc8f8b39b4 | ||
|
|
e5b2d44e8e | ||
|
|
ec0918027e | ||
|
|
8a54f8d4e2 | ||
|
|
5c27126b6d | ||
|
|
e069aff0e2 | ||
|
|
733526fae3 | ||
|
|
1ef001f8e9 | ||
|
|
7732377fde | ||
|
|
b7786e589b | ||
|
|
4f60970a91 | ||
|
|
1c1286dd57 | ||
|
|
41c9f08f60 | ||
|
|
fc4bfab0f7 | ||
|
|
769a12f74e | ||
|
|
dabaa2bc5e | ||
|
|
b674828a91 | ||
|
|
761da66658 | ||
|
|
c8aba62301 | ||
|
|
07ab2e6805 | ||
|
|
f62e0c8c08 | ||
|
|
6ca00f9dbb | ||
|
|
0fba80e30f | ||
|
|
7073c40385 | ||
|
|
8fb9d92daf | ||
|
|
2d81665f99 | ||
|
|
b398935539 | ||
|
|
95f588aae1 | ||
|
|
ffe524d95a | ||
|
|
4d4ad900b1 | ||
|
|
acc136af19 | ||
|
|
0f12a40eb2 | ||
|
|
bf124daf72 | ||
|
|
1682ced5cc | ||
|
|
80b316bc70 | ||
|
|
00d2340d4b | ||
|
|
514a329580 | ||
|
|
f2b8bb01bf | ||
|
|
30153ab059 | ||
|
|
2957b15ede | ||
|
|
12ace95f3e | ||
|
|
babe19767d | ||
|
|
d01843e1ab | ||
|
|
9964cb512a | ||
|
|
ae38214b7c | ||
|
|
9812286801 | ||
|
|
32a40e5919 | ||
|
|
97de944a14 | ||
|
|
c9bd87f4b3 | ||
|
|
ac46568996 | ||
|
|
7c1b8ee02c | ||
|
|
aa6901265d | ||
|
|
b76e9ad1c0 | ||
|
|
edb8007c65 | ||
|
|
956a29411f | ||
|
|
1a2361050b | ||
|
|
0c9e92f6f9 | ||
|
|
bfdff46859 | ||
|
|
9a22808499 | ||
|
|
88b373af41 | ||
|
|
dea2f37e8f | ||
|
|
30cce68e0b | ||
|
|
985eff972a | ||
|
|
31ca332158 | ||
|
|
bf76c1601d | ||
|
|
e572f8d48f | ||
|
|
482b5d49a3 | ||
|
|
126fd217e7 | ||
|
|
0327b0e1ec | ||
|
|
3d5a7b4813 | ||
|
|
e0bb30f63b | ||
|
|
e5ae58c5df | ||
|
|
13e4bb4b93 | ||
|
|
d5fd27d2a2 | ||
|
|
0a034b9984 | ||
|
|
6a8106c0eb | ||
|
|
2cacfc7413 | ||
|
|
388ab5c16c | ||
|
|
81ea6f8c25 | ||
|
|
4f885994b7 | ||
|
|
25e2c9ee80 | ||
|
|
12c04f5571 | ||
|
|
3ad1c6a47a | ||
|
|
e7e13ecc74 | ||
|
|
991b8d2040 | ||
|
|
43fadbf6b4 | ||
|
|
ca79d37135 | ||
|
|
df8ef15535 | ||
|
|
249c1530d0 | ||
|
|
081b769abc | ||
|
|
b8b101d747 | ||
|
|
a19be192e0 | ||
|
|
92da82a200 | ||
|
|
820ba1dfba | ||
|
|
63c8962f09 | ||
|
|
c1a6996549 | ||
|
|
05253841af | ||
|
|
f2ef0503a0 | ||
|
|
938da38fc3 | ||
|
|
9311a87bf5 | ||
|
|
b45294ded3 | ||
|
|
82d3190016 | ||
|
|
d8cbcc1977 | ||
|
|
ee05adfca1 | ||
|
|
168c915b5f | ||
|
|
6c80be52af | ||
|
|
ead92cdf82 | ||
|
|
c0f0cfef59 | ||
|
|
cefc0ba96e | ||
|
|
ad091b1062 | ||
|
|
876bc6d8c4 | ||
|
|
9f206d4363 | ||
|
|
a2d11e6d98 | ||
|
|
3b38af3984 | ||
|
|
3875f91bb9 | ||
|
|
c813776b0c | ||
|
|
3afb421cba | ||
|
|
c16633568b | ||
|
|
87f8ff2bb4 | ||
|
|
b423303f1e | ||
|
|
f6ff222679 | ||
|
|
0152fa0c03 | ||
|
|
37ebbe83bc | ||
|
|
63e036d39e | ||
|
|
f0cbf34a78 | ||
|
|
596bc89ee6 | ||
|
|
b8c877e1d2 | ||
|
|
197d9781cb | ||
|
|
f3f323637e | ||
|
|
9748abc103 | ||
|
|
596f049971 | ||
|
|
dee80cb6f5 | ||
|
|
b4ab73468b | ||
|
|
a300199a97 | ||
|
|
09dd765583 | ||
|
|
0c8b765415 | ||
|
|
0824ec502f | ||
|
|
9e0e353a5f | ||
|
|
e934b006e2 | ||
|
|
05479bb8fd | ||
|
|
d07247566d | ||
|
|
19e6097df6 | ||
|
|
2cff3cf29c | ||
|
|
5cac9b8e5e | ||
|
|
c2a516ea32 | ||
|
|
192b38d3e2 | ||
|
|
bb018e3546 | ||
|
|
4919d73cc5 | ||
|
|
f3ddffb5ff | ||
|
|
9bdfa77fa0 | ||
|
|
c65003009f | ||
|
|
0f722109b7 | ||
|
|
f7d86dec3c | ||
|
|
6b49c8a70c | ||
|
|
ab9a8f3e53 | ||
|
|
4e12628266 | ||
|
|
e6d8d4de42 | ||
|
|
6620b90eb4 | ||
|
|
6fd3af8891 | ||
|
|
46979b8418 | ||
|
|
1718a11de2 | ||
|
|
2016b1d8c7 | ||
|
|
4b72e45fc2 | ||
|
|
ead5ce905b | ||
|
|
f233f2da3f |
24
.github/workflows/builder.yml
vendored
24
.github/workflows/builder.yml
vendored
@@ -27,12 +27,12 @@ jobs:
|
||||
publish: ${{ steps.version.outputs.publish }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -88,13 +88,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
exclude:
|
||||
- arch: armv7
|
||||
- arch: armhf
|
||||
- arch: i386
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
@@ -120,7 +116,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -227,7 +223,7 @@ jobs:
|
||||
- green
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set build additional args
|
||||
run: |
|
||||
@@ -265,7 +261,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Initialize git
|
||||
uses: home-assistant/actions/helpers/git-init@master
|
||||
@@ -309,7 +305,7 @@ jobs:
|
||||
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
@@ -418,10 +414,10 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -463,7 +459,7 @@ jobs:
|
||||
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
|
||||
4
.github/workflows/ci.yaml
vendored
4
.github/workflows/ci.yaml
vendored
@@ -99,7 +99,7 @@ jobs:
|
||||
steps:
|
||||
- &checkout
|
||||
name: Check out code from GitHub
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate_python_cache_key
|
||||
run: |
|
||||
@@ -257,7 +257,7 @@ jobs:
|
||||
- &setup-python-default
|
||||
name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: &actions-setup-python actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
|
||||
6
.github/workflows/codeql.yml
vendored
6
.github/workflows/codeql.yml
vendored
@@ -21,14 +21,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
4
.github/workflows/translations.yml
vendored
4
.github/workflows/translations.yml
vendored
@@ -19,10 +19,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
|
||||
42
.github/workflows/wheels.yml
vendored
42
.github/workflows/wheels.yml
vendored
@@ -33,11 +33,11 @@ jobs:
|
||||
steps:
|
||||
- &checkout
|
||||
name: Checkout the repository
|
||||
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -77,20 +77,8 @@ jobs:
|
||||
|
||||
# Use C-Extension for SQLAlchemy
|
||||
echo "REQUIRE_SQLALCHEMY_CEXT=1"
|
||||
|
||||
# Add additional pip wheel build constraints
|
||||
echo "PIP_CONSTRAINT=build_constraints.txt"
|
||||
) > .env_file
|
||||
|
||||
- name: Write pip wheel build constraints
|
||||
run: |
|
||||
(
|
||||
# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
|
||||
# this caused the numpy builds to fail
|
||||
# https://github.com/scikit-build/ninja-python-distributions/issues/274
|
||||
echo "ninja==1.11.1.1"
|
||||
) > build_constraints.txt
|
||||
|
||||
- name: Upload env_file
|
||||
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
with:
|
||||
@@ -99,13 +87,6 @@ jobs:
|
||||
include-hidden-files: true
|
||||
overwrite: true
|
||||
|
||||
- name: Upload build_constraints
|
||||
uses: *actions-upload-artifact
|
||||
with:
|
||||
name: build_constraints
|
||||
path: ./build_constraints.txt
|
||||
overwrite: true
|
||||
|
||||
- name: Upload requirements_diff
|
||||
uses: *actions-upload-artifact
|
||||
with:
|
||||
@@ -138,13 +119,6 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
- arch: aarch64
|
||||
os: ubuntu-24.04-arm
|
||||
exclude:
|
||||
- abi: cp314
|
||||
arch: armv7
|
||||
- abi: cp314
|
||||
arch: armhf
|
||||
- abi: cp314
|
||||
arch: i386
|
||||
steps:
|
||||
- *checkout
|
||||
|
||||
@@ -154,12 +128,6 @@ jobs:
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- &download-build-constraints
|
||||
name: Download build_constraints
|
||||
uses: *actions-download-artifact
|
||||
with:
|
||||
name: build_constraints
|
||||
|
||||
- &download-requirements-diff
|
||||
name: Download requirements_diff
|
||||
uses: *actions-download-artifact
|
||||
@@ -199,7 +167,7 @@ jobs:
|
||||
- *checkout
|
||||
|
||||
- *download-env-file
|
||||
- *download-build-constraints
|
||||
|
||||
- *download-requirements-diff
|
||||
|
||||
- name: Download requirements_all_wheels
|
||||
@@ -209,10 +177,6 @@ jobs:
|
||||
|
||||
- name: Adjust build env
|
||||
run: |
|
||||
if [ "${{ matrix.arch }}" = "i386" ]; then
|
||||
echo "NPY_DISABLE_SVML=1" >> .env_file
|
||||
fi
|
||||
|
||||
# Do not pin numpy in wheels building
|
||||
sed -i "/numpy/d" homeassistant/package_constraints.txt
|
||||
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
|
||||
|
||||
@@ -94,7 +94,7 @@ repos:
|
||||
pass_filenames: false
|
||||
language: script
|
||||
types: [text]
|
||||
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
|
||||
files: ^(script/hassfest/(metadata|docker)\.py|homeassistant/const\.py$|pyproject\.toml)$
|
||||
- id: hassfest-mypy-config
|
||||
name: hassfest-mypy-config
|
||||
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
|
||||
|
||||
8
CODEOWNERS
generated
8
CODEOWNERS
generated
@@ -69,6 +69,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/airly/ @bieniu
|
||||
/homeassistant/components/airnow/ @asymworks
|
||||
/tests/components/airnow/ @asymworks
|
||||
/homeassistant/components/airobot/ @mettolen
|
||||
/tests/components/airobot/ @mettolen
|
||||
/homeassistant/components/airos/ @CoMPaTech
|
||||
/tests/components/airos/ @CoMPaTech
|
||||
/homeassistant/components/airq/ @Sibgatulin @dl2080
|
||||
@@ -389,6 +391,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dsmr/ @Robbie1221
|
||||
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
/homeassistant/components/duckdns/ @tr4nt0r
|
||||
/tests/components/duckdns/ @tr4nt0r
|
||||
/homeassistant/components/duke_energy/ @hunterjm
|
||||
/tests/components/duke_energy/ @hunterjm
|
||||
/homeassistant/components/duotecno/ @cereal2nd
|
||||
@@ -627,6 +631,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/guardian/ @bachya
|
||||
/homeassistant/components/habitica/ @tr4nt0r
|
||||
/tests/components/habitica/ @tr4nt0r
|
||||
/homeassistant/components/hanna/ @bestycame
|
||||
/tests/components/hanna/ @bestycame
|
||||
/homeassistant/components/hardkernel/ @home-assistant/core
|
||||
/tests/components/hardkernel/ @home-assistant/core
|
||||
/homeassistant/components/hardware/ @home-assistant/core
|
||||
@@ -846,6 +852,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/kraken/ @eifinger
|
||||
/homeassistant/components/kulersky/ @emlove
|
||||
/tests/components/kulersky/ @emlove
|
||||
/homeassistant/components/labs/ @home-assistant/core
|
||||
/tests/components/labs/ @home-assistant/core
|
||||
/homeassistant/components/lacrosse_view/ @IceBotYT
|
||||
/tests/components/lacrosse_view/ @IceBotYT
|
||||
/homeassistant/components/lamarzocco/ @zweckj
|
||||
|
||||
23
Dockerfile
generated
23
Dockerfile
generated
@@ -15,23 +15,14 @@ ARG QEMU_CPU
|
||||
# Home Assistant S6-Overlay
|
||||
COPY rootfs /
|
||||
|
||||
# Needs to be redefined inside the FROM statement to be set for RUN commands
|
||||
ARG BUILD_ARCH
|
||||
# Get go2rtc binary
|
||||
RUN \
|
||||
case "${BUILD_ARCH}" in \
|
||||
"aarch64") go2rtc_suffix='arm64' ;; \
|
||||
"armhf") go2rtc_suffix='armv6' ;; \
|
||||
"armv7") go2rtc_suffix='arm' ;; \
|
||||
*) go2rtc_suffix=${BUILD_ARCH} ;; \
|
||||
esac \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& chmod +x /bin/go2rtc \
|
||||
# Verify go2rtc can be executed
|
||||
&& go2rtc --version
|
||||
# Add go2rtc binary
|
||||
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.9.6
|
||||
RUN \
|
||||
# Verify go2rtc can be executed
|
||||
go2rtc --version \
|
||||
# Install uv
|
||||
&& pip3 install uv==0.9.6
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
|
||||
cosign:
|
||||
base_identity: https://github.com/home-assistant/docker/.*
|
||||
identity: https://github.com/home-assistant/core/.*
|
||||
|
||||
@@ -176,6 +176,8 @@ FRONTEND_INTEGRATIONS = {
|
||||
STAGE_0_INTEGRATIONS = (
|
||||
# Load logging and http deps as soon as possible
|
||||
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
|
||||
# Setup labs for preview features
|
||||
("labs", {"labs"}, STAGE_0_SUBSTAGE_TIMEOUT),
|
||||
# Setup frontend
|
||||
("frontend", FRONTEND_INTEGRATIONS, None),
|
||||
# Setup recorder
|
||||
@@ -212,6 +214,7 @@ DEFAULT_INTEGRATIONS = {
|
||||
"backup",
|
||||
"frontend",
|
||||
"hardware",
|
||||
"labs",
|
||||
"logger",
|
||||
"network",
|
||||
"system_health",
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
"""The Actron Air integration."""
|
||||
|
||||
from actron_neo_api import (
|
||||
ActronAirNeoACSystem,
|
||||
ActronNeoAPI,
|
||||
ActronNeoAPIError,
|
||||
ActronNeoAuthError,
|
||||
ActronAirACSystem,
|
||||
ActronAirAPI,
|
||||
ActronAirAPIError,
|
||||
ActronAirAuthError,
|
||||
)
|
||||
|
||||
from homeassistant.const import CONF_API_TOKEN, Platform
|
||||
@@ -23,16 +23,16 @@ PLATFORM = [Platform.CLIMATE]
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
|
||||
"""Set up Actron Air integration from a config entry."""
|
||||
|
||||
api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN])
|
||||
systems: list[ActronAirNeoACSystem] = []
|
||||
api = ActronAirAPI(refresh_token=entry.data[CONF_API_TOKEN])
|
||||
systems: list[ActronAirACSystem] = []
|
||||
|
||||
try:
|
||||
systems = await api.get_ac_systems()
|
||||
await api.update_status()
|
||||
except ActronNeoAuthError:
|
||||
except ActronAirAuthError:
|
||||
_LOGGER.error("Authentication error while setting up Actron Air integration")
|
||||
raise
|
||||
except ActronNeoAPIError as err:
|
||||
except ActronAirAPIError as err:
|
||||
_LOGGER.error("API error while setting up Actron Air integration: %s", err)
|
||||
raise
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone
|
||||
from actron_neo_api import ActronAirStatus, ActronAirZone
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
@@ -132,7 +132,7 @@ class ActronSystemClimate(BaseClimateEntity):
|
||||
return self._status.max_temp
|
||||
|
||||
@property
|
||||
def _status(self) -> ActronAirNeoStatus:
|
||||
def _status(self) -> ActronAirStatus:
|
||||
"""Get the current status from the coordinator."""
|
||||
return self.coordinator.data
|
||||
|
||||
@@ -194,7 +194,7 @@ class ActronZoneClimate(BaseClimateEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ActronAirSystemCoordinator,
|
||||
zone: ActronAirNeoZone,
|
||||
zone: ActronAirZone,
|
||||
) -> None:
|
||||
"""Initialize an Actron Air unit."""
|
||||
super().__init__(coordinator, zone.title)
|
||||
@@ -221,7 +221,7 @@ class ActronZoneClimate(BaseClimateEntity):
|
||||
return self._zone.max_temp
|
||||
|
||||
@property
|
||||
def _zone(self) -> ActronAirNeoZone:
|
||||
def _zone(self) -> ActronAirZone:
|
||||
"""Get the current zone data from the coordinator."""
|
||||
status = self.coordinator.data
|
||||
return status.zones[self._zone_id]
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronNeoAPI, ActronNeoAuthError
|
||||
from actron_neo_api import ActronAirAPI, ActronAirAuthError
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN
|
||||
@@ -17,7 +17,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._api: ActronNeoAPI | None = None
|
||||
self._api: ActronAirAPI | None = None
|
||||
self._device_code: str | None = None
|
||||
self._user_code: str = ""
|
||||
self._verification_uri: str = ""
|
||||
@@ -30,10 +30,10 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the initial step."""
|
||||
if self._api is None:
|
||||
_LOGGER.debug("Initiating device authorization")
|
||||
self._api = ActronNeoAPI()
|
||||
self._api = ActronAirAPI()
|
||||
try:
|
||||
device_code_response = await self._api.request_device_code()
|
||||
except ActronNeoAuthError as err:
|
||||
except ActronAirAuthError as err:
|
||||
_LOGGER.error("OAuth2 flow failed: %s", err)
|
||||
return self.async_abort(reason="oauth2_error")
|
||||
|
||||
@@ -50,7 +50,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
await self._api.poll_for_token(self._device_code)
|
||||
_LOGGER.debug("Authorization successful")
|
||||
except ActronNeoAuthError as ex:
|
||||
except ActronAirAuthError as ex:
|
||||
_LOGGER.exception("Error while waiting for device authorization")
|
||||
raise CannotConnect from ex
|
||||
|
||||
@@ -89,7 +89,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
user_data = await self._api.get_user_info()
|
||||
except ActronNeoAuthError as err:
|
||||
except ActronAirAuthError as err:
|
||||
_LOGGER.error("Error getting user info: %s", err)
|
||||
return self.async_abort(reason="oauth2_error")
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI
|
||||
from actron_neo_api import ActronAirACSystem, ActronAirAPI, ActronAirStatus
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -23,7 +23,7 @@ ERROR_UNKNOWN = "unknown_error"
|
||||
class ActronAirRuntimeData:
|
||||
"""Runtime data for the Actron Air integration."""
|
||||
|
||||
api: ActronNeoAPI
|
||||
api: ActronAirAPI
|
||||
system_coordinators: dict[str, ActronAirSystemCoordinator]
|
||||
|
||||
|
||||
@@ -33,15 +33,15 @@ AUTH_ERROR_THRESHOLD = 3
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
|
||||
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
|
||||
"""System coordinator for Actron Air integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ActronAirConfigEntry,
|
||||
api: ActronNeoAPI,
|
||||
system: ActronAirNeoACSystem,
|
||||
api: ActronAirAPI,
|
||||
system: ActronAirACSystem,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
@@ -57,7 +57,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
self.last_seen = dt_util.utcnow()
|
||||
|
||||
async def _async_update_data(self) -> ActronAirNeoStatus:
|
||||
async def _async_update_data(self) -> ActronAirStatus:
|
||||
"""Fetch updates and merge incremental changes into the full state."""
|
||||
await self.api.update_status()
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
|
||||
@@ -12,5 +12,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/actron_air",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["actron-neo-api==0.1.84"]
|
||||
"requirements": ["actron-neo-api==0.1.87"]
|
||||
}
|
||||
|
||||
29
homeassistant/components/airobot/__init__.py
Normal file
29
homeassistant/components/airobot/__init__.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""The Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
"""Set up Airobot from a config entry."""
|
||||
coordinator = AirobotDataUpdateCoordinator(hass, entry)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
151
homeassistant/components/airobot/climate.py
Normal file
151
homeassistant/components/airobot/climate.py
Normal file
@@ -0,0 +1,151 @@
|
||||
"""Climate platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest.const import (
|
||||
MODE_AWAY,
|
||||
MODE_HOME,
|
||||
SETPOINT_TEMP_MAX,
|
||||
SETPOINT_TEMP_MIN,
|
||||
)
|
||||
from pyairobotrest.exceptions import AirobotError
|
||||
from pyairobotrest.models import ThermostatSettings, ThermostatStatus
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
PRESET_AWAY,
|
||||
PRESET_BOOST,
|
||||
PRESET_HOME,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
_PRESET_MODE_2_MODE = {
|
||||
PRESET_AWAY: MODE_AWAY,
|
||||
PRESET_HOME: MODE_HOME,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot climate platform."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities([AirobotClimate(coordinator)])
|
||||
|
||||
|
||||
class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
"""Representation of an Airobot thermostat."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_translation_key = "thermostat"
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_hvac_modes = [HVACMode.HEAT]
|
||||
_attr_preset_modes = [PRESET_HOME, PRESET_AWAY, PRESET_BOOST]
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
_attr_min_temp = SETPOINT_TEMP_MIN
|
||||
_attr_max_temp = SETPOINT_TEMP_MAX
|
||||
|
||||
@property
|
||||
def _status(self) -> ThermostatStatus:
|
||||
"""Get status from coordinator data."""
|
||||
return self.coordinator.data.status
|
||||
|
||||
@property
|
||||
def _settings(self) -> ThermostatSettings:
|
||||
"""Get settings from coordinator data."""
|
||||
return self.coordinator.data.settings
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._status.temp_air
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
if self._settings.is_home_mode:
|
||||
return self._settings.setpoint_temp
|
||||
return self._settings.setpoint_temp_away
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return current HVAC mode."""
|
||||
if self._status.is_heating:
|
||||
return HVACMode.HEAT
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction:
|
||||
"""Return current HVAC action."""
|
||||
if self._status.is_heating:
|
||||
return HVACAction.HEATING
|
||||
return HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return current preset mode."""
|
||||
if self._settings.setting_flags.boost_enabled:
|
||||
return PRESET_BOOST
|
||||
if self._settings.is_home_mode:
|
||||
return PRESET_HOME
|
||||
return PRESET_AWAY
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temperature = kwargs[ATTR_TEMPERATURE]
|
||||
|
||||
try:
|
||||
if self._settings.is_home_mode:
|
||||
await self.coordinator.client.set_home_temperature(float(temperature))
|
||||
else:
|
||||
await self.coordinator.client.set_away_temperature(float(temperature))
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_temperature_failed",
|
||||
translation_placeholders={"temperature": str(temperature)},
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
try:
|
||||
if preset_mode == PRESET_BOOST:
|
||||
# Enable boost mode
|
||||
if not self._settings.setting_flags.boost_enabled:
|
||||
await self.coordinator.client.set_boost_mode(True)
|
||||
else:
|
||||
# Disable boost mode if it's enabled
|
||||
if self._settings.setting_flags.boost_enabled:
|
||||
await self.coordinator.client.set_boost_mode(False)
|
||||
|
||||
# Set the mode (HOME or AWAY)
|
||||
await self.coordinator.client.set_mode(_PRESET_MODE_2_MODE[preset_mode])
|
||||
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_preset_mode_failed",
|
||||
translation_placeholders={"preset_mode": preset_mode},
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
183
homeassistant/components/airobot/config_flow.py
Normal file
183
homeassistant/components/airobot/config_flow.py
Normal file
@@ -0,0 +1,183 @@
|
||||
"""Config flow for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest import AirobotClient
|
||||
from pyairobotrest.exceptions import (
|
||||
AirobotAuthError,
|
||||
AirobotConnectionError,
|
||||
AirobotError,
|
||||
AirobotTimeoutError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow as BaseConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DeviceInfo:
|
||||
"""Device information."""
|
||||
|
||||
title: str
|
||||
device_id: str
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> DeviceInfo:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
client = AirobotClient(
|
||||
host=data[CONF_HOST],
|
||||
username=data[CONF_USERNAME],
|
||||
password=data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
try:
|
||||
# Try to fetch data to validate connection and authentication
|
||||
status = await client.get_statuses()
|
||||
settings = await client.get_settings()
|
||||
except AirobotAuthError as err:
|
||||
raise InvalidAuth from err
|
||||
except (AirobotConnectionError, AirobotTimeoutError, AirobotError) as err:
|
||||
raise CannotConnect from err
|
||||
|
||||
# Use device name or device ID as title
|
||||
title = settings.device_name or status.device_id
|
||||
|
||||
return DeviceInfo(title=title, device_id=status.device_id)
|
||||
|
||||
|
||||
class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Airobot."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._discovered_host: str | None = None
|
||||
self._discovered_mac: str | None = None
|
||||
self._discovered_device_id: str | None = None
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery."""
|
||||
# Store the discovered IP address and MAC
|
||||
self._discovered_host = discovery_info.ip
|
||||
self._discovered_mac = discovery_info.macaddress
|
||||
|
||||
# Extract device_id from hostname (format: airobot-thermostat-t01xxxxxx)
|
||||
hostname = discovery_info.hostname.lower()
|
||||
device_id = hostname.replace("airobot-thermostat-", "").upper()
|
||||
self._discovered_device_id = device_id
|
||||
# Set unique_id to device_id for duplicate detection
|
||||
await self.async_set_unique_id(device_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
|
||||
|
||||
# Show the confirmation form
|
||||
return await self.async_step_dhcp_confirm()
|
||||
|
||||
async def async_step_dhcp_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery confirmation - ask for credentials only."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
# Combine discovered host and device_id with user-provided password
|
||||
data = {
|
||||
CONF_HOST: self._discovered_host,
|
||||
CONF_USERNAME: self._discovered_device_id,
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, data)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Store MAC address in config entry data
|
||||
if self._discovered_mac:
|
||||
data[CONF_MAC] = self._discovered_mac
|
||||
|
||||
return self.async_create_entry(title=info.title, data=data)
|
||||
|
||||
# Only ask for password since we already have the device_id from discovery
|
||||
return self.async_show_form(
|
||||
step_id="dhcp_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"host": self._discovered_host or "",
|
||||
"device_id": self._discovered_device_id or "",
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Use device ID as unique ID to prevent duplicates
|
||||
await self.async_set_unique_id(info.device_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=info.title, data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
|
||||
class InvalidAuth(HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
5
homeassistant/components/airobot/const.py
Normal file
5
homeassistant/components/airobot/const.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Constants for the Airobot integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "airobot"
|
||||
59
homeassistant/components/airobot/coordinator.py
Normal file
59
homeassistant/components/airobot/coordinator.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Coordinator for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyairobotrest import AirobotClient
|
||||
from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
from .models import AirobotData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Update interval - thermostat measures air every 30 seconds
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
type AirobotConfigEntry = ConfigEntry[AirobotDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
|
||||
"""Class to manage fetching Airobot data."""
|
||||
|
||||
config_entry: AirobotConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AirobotConfigEntry) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
self.client = AirobotClient(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirobotData:
|
||||
"""Fetch data from API endpoint."""
|
||||
try:
|
||||
status = await self.client.get_statuses()
|
||||
settings = await self.client.get_settings()
|
||||
except (AirobotAuthError, AirobotConnectionError) as err:
|
||||
raise UpdateFailed(f"Failed to communicate with device: {err}") from err
|
||||
|
||||
return AirobotData(status=status, settings=settings)
|
||||
42
homeassistant/components/airobot/entity.py
Normal file
42
homeassistant/components/airobot/entity.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Base entity for Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import CONF_MAC
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotDataUpdateCoordinator
|
||||
|
||||
|
||||
class AirobotEntity(CoordinatorEntity[AirobotDataUpdateCoordinator]):
|
||||
"""Base class for Airobot entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
status = coordinator.data.status
|
||||
settings = coordinator.data.settings
|
||||
|
||||
self._attr_unique_id = status.device_id
|
||||
|
||||
connections = set()
|
||||
if (mac := coordinator.config_entry.data.get(CONF_MAC)) is not None:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, status.device_id)},
|
||||
connections=connections,
|
||||
name=settings.device_name or status.device_id,
|
||||
manufacturer="Airobot",
|
||||
model="Thermostat",
|
||||
model_id="TE1",
|
||||
sw_version=str(status.fw_version),
|
||||
hw_version=str(status.hw_version),
|
||||
)
|
||||
17
homeassistant/components/airobot/manifest.json
Normal file
17
homeassistant/components/airobot/manifest.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"domain": "airobot",
|
||||
"name": "Airobot",
|
||||
"codeowners": ["@mettolen"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
{
|
||||
"hostname": "airobot-thermostat-*"
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/airobot",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyairobotrest"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyairobotrest==0.1.0"]
|
||||
}
|
||||
15
homeassistant/components/airobot/models.py
Normal file
15
homeassistant/components/airobot/models.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Models for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyairobotrest.models import ThermostatSettings, ThermostatStatus
|
||||
|
||||
|
||||
@dataclass
|
||||
class AirobotData:
|
||||
"""Data from the Airobot coordinator."""
|
||||
|
||||
status: ThermostatStatus
|
||||
settings: ThermostatSettings
|
||||
70
homeassistant/components/airobot/quality_scale.yaml
Normal file
70
homeassistant/components/airobot/quality_scale.yaml
Normal file
@@ -0,0 +1,70 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not use event subscriptions.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no dynamic device discovery needed.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no stale device handling needed.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
44
homeassistant/components/airobot/strings.json
Normal file
44
homeassistant/components/airobot/strings.json
Normal file
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"dhcp_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "The thermostat password."
|
||||
},
|
||||
"description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Airobot thermostat.",
|
||||
"password": "The thermostat password.",
|
||||
"username": "The thermostat Device ID (e.g., T01XXXXXX)."
|
||||
},
|
||||
"description": "Enter your Airobot thermostat connection details. Find the Device ID and password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"set_preset_mode_failed": {
|
||||
"message": "Failed to set preset mode to {preset_mode}."
|
||||
},
|
||||
"set_temperature_failed": {
|
||||
"message": "Failed to set temperature to {temperature}."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -45,7 +45,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
data[CONF_PASSWORD],
|
||||
)
|
||||
|
||||
return await api.login_mode_interactive(data[CONF_CODE])
|
||||
return await api.login.login_mode_interactive(data[CONF_CODE])
|
||||
|
||||
|
||||
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@@ -59,7 +59,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
async def _async_update_data(self) -> dict[str, AmazonDevice]:
|
||||
"""Update device data."""
|
||||
try:
|
||||
await self.api.login_mode_stored_data()
|
||||
await self.api.login.login_mode_stored_data()
|
||||
data = await self.api.get_devices_data()
|
||||
except CannotConnect as err:
|
||||
raise UpdateFailed(
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==8.0.1"]
|
||||
"requirements": ["aioamazondevices==9.0.3"]
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -30,14 +29,36 @@ __all__ = [
|
||||
"async_devices_payload",
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
CONF_SNAPSHOTS_URL = "snapshots_url"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_SNAPSHOTS_URL): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the analytics integration."""
|
||||
analytics = Analytics(hass)
|
||||
analytics_config = config.get(DOMAIN, {})
|
||||
|
||||
# For now we want to enable device analytics only if the url option
|
||||
# is explicitly listed in YAML.
|
||||
if CONF_SNAPSHOTS_URL in analytics_config:
|
||||
disable_snapshots = False
|
||||
snapshots_url = analytics_config[CONF_SNAPSHOTS_URL]
|
||||
else:
|
||||
disable_snapshots = True
|
||||
snapshots_url = None
|
||||
|
||||
analytics = Analytics(hass, snapshots_url, disable_snapshots)
|
||||
|
||||
# Load stored data
|
||||
await analytics.load()
|
||||
|
||||
@@ -59,9 +59,6 @@ from homeassistant.loader import (
|
||||
from homeassistant.setup import async_get_loaded_integrations
|
||||
|
||||
from .const import (
|
||||
ANALYTICS_ENDPOINT_URL,
|
||||
ANALYTICS_ENDPOINT_URL_DEV,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
ATTR_ADDON_COUNT,
|
||||
ATTR_ADDONS,
|
||||
ATTR_ARCH,
|
||||
@@ -91,10 +88,14 @@ from .const import (
|
||||
ATTR_USER_COUNT,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
BASIC_ENDPOINT_URL,
|
||||
BASIC_ENDPOINT_URL_DEV,
|
||||
DOMAIN,
|
||||
INTERVAL,
|
||||
LOGGER,
|
||||
PREFERENCE_SCHEMA,
|
||||
SNAPSHOT_DEFAULT_URL,
|
||||
SNAPSHOT_URL_PATH,
|
||||
SNAPSHOT_VERSION,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
@@ -236,10 +237,18 @@ class AnalyticsData:
|
||||
class Analytics:
|
||||
"""Analytics helper class for the analytics integration."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
snapshots_url: str | None = None,
|
||||
disable_snapshots: bool = False,
|
||||
) -> None:
|
||||
"""Initialize the Analytics class."""
|
||||
self.hass: HomeAssistant = hass
|
||||
self.session = async_get_clientsession(hass)
|
||||
self._hass: HomeAssistant = hass
|
||||
self._snapshots_url = snapshots_url
|
||||
self._disable_snapshots = disable_snapshots
|
||||
|
||||
self._session = async_get_clientsession(hass)
|
||||
self._data = AnalyticsData(False, {})
|
||||
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
|
||||
self._basic_scheduled: CALLBACK_TYPE | None = None
|
||||
@@ -249,13 +258,15 @@ class Analytics:
|
||||
def preferences(self) -> dict:
|
||||
"""Return the current active preferences."""
|
||||
preferences = self._data.preferences
|
||||
return {
|
||||
result = {
|
||||
ATTR_BASE: preferences.get(ATTR_BASE, False),
|
||||
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
|
||||
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
|
||||
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
|
||||
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
|
||||
}
|
||||
if not self._disable_snapshots:
|
||||
result[ATTR_SNAPSHOTS] = preferences.get(ATTR_SNAPSHOTS, False)
|
||||
return result
|
||||
|
||||
@property
|
||||
def onboarded(self) -> bool:
|
||||
@@ -272,13 +283,13 @@ class Analytics:
|
||||
"""Return the endpoint that will receive the payload."""
|
||||
if RELEASE_CHANNEL is ReleaseChannel.DEV:
|
||||
# dev installations will contact the dev analytics environment
|
||||
return ANALYTICS_ENDPOINT_URL_DEV
|
||||
return ANALYTICS_ENDPOINT_URL
|
||||
return BASIC_ENDPOINT_URL_DEV
|
||||
return BASIC_ENDPOINT_URL
|
||||
|
||||
@property
|
||||
def supervisor(self) -> bool:
|
||||
"""Return bool if a supervisor is present."""
|
||||
return is_hassio(self.hass)
|
||||
return is_hassio(self._hass)
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Load preferences."""
|
||||
@@ -288,7 +299,7 @@ class Analytics:
|
||||
|
||||
if (
|
||||
self.supervisor
|
||||
and (supervisor_info := hassio.get_supervisor_info(self.hass)) is not None
|
||||
and (supervisor_info := hassio.get_supervisor_info(self._hass)) is not None
|
||||
):
|
||||
if not self.onboarded:
|
||||
# User have not configured analytics, get this setting from the supervisor
|
||||
@@ -315,7 +326,7 @@ class Analytics:
|
||||
|
||||
if self.supervisor:
|
||||
await hassio.async_update_diagnostics(
|
||||
self.hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
|
||||
self._hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
|
||||
)
|
||||
|
||||
async def send_analytics(self, _: datetime | None = None) -> None:
|
||||
@@ -323,7 +334,7 @@ class Analytics:
|
||||
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
|
||||
return
|
||||
|
||||
hass = self.hass
|
||||
hass = self._hass
|
||||
supervisor_info = None
|
||||
operating_system_info: dict[str, Any] = {}
|
||||
|
||||
@@ -463,7 +474,7 @@ class Analytics:
|
||||
|
||||
try:
|
||||
async with timeout(30):
|
||||
response = await self.session.post(self.endpoint_basic, json=payload)
|
||||
response = await self._session.post(self.endpoint_basic, json=payload)
|
||||
if response.status == 200:
|
||||
LOGGER.info(
|
||||
(
|
||||
@@ -479,11 +490,9 @@ class Analytics:
|
||||
self.endpoint_basic,
|
||||
)
|
||||
except TimeoutError:
|
||||
LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
|
||||
LOGGER.error("Timeout sending analytics to %s", BASIC_ENDPOINT_URL)
|
||||
except aiohttp.ClientError as err:
|
||||
LOGGER.error(
|
||||
"Error sending analytics to %s: %r", ANALYTICS_ENDPOINT_URL, err
|
||||
)
|
||||
LOGGER.error("Error sending analytics to %s: %r", BASIC_ENDPOINT_URL, err)
|
||||
|
||||
@callback
|
||||
def _async_should_report_integration(
|
||||
@@ -507,7 +516,7 @@ class Analytics:
|
||||
if not integration.config_flow:
|
||||
return False
|
||||
|
||||
entries = self.hass.config_entries.async_entries(integration.domain)
|
||||
entries = self._hass.config_entries.async_entries(integration.domain)
|
||||
|
||||
# Filter out ignored and disabled entries
|
||||
return any(
|
||||
@@ -521,7 +530,7 @@ class Analytics:
|
||||
if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
|
||||
return
|
||||
|
||||
payload = await _async_snapshot_payload(self.hass)
|
||||
payload = await _async_snapshot_payload(self._hass)
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
@@ -532,11 +541,16 @@ class Analytics:
|
||||
self._data.submission_identifier
|
||||
)
|
||||
|
||||
url = (
|
||||
self._snapshots_url
|
||||
if self._snapshots_url is not None
|
||||
else SNAPSHOT_DEFAULT_URL
|
||||
)
|
||||
url += SNAPSHOT_URL_PATH
|
||||
|
||||
try:
|
||||
async with timeout(30):
|
||||
response = await self.session.post(
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL, json=payload, headers=headers
|
||||
)
|
||||
response = await self._session.post(url, json=payload, headers=headers)
|
||||
|
||||
if response.status == 200: # OK
|
||||
response_data = await response.json()
|
||||
@@ -562,7 +576,7 @@ class Analytics:
|
||||
# Clear the invalid identifier and retry on next cycle
|
||||
LOGGER.warning(
|
||||
"Invalid submission identifier to %s, clearing: %s",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
url,
|
||||
error_message,
|
||||
)
|
||||
self._data.submission_identifier = None
|
||||
@@ -571,7 +585,7 @@ class Analytics:
|
||||
LOGGER.warning(
|
||||
"Malformed snapshot analytics submission (%s) to %s: %s",
|
||||
error_kind,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
url,
|
||||
error_message,
|
||||
)
|
||||
|
||||
@@ -579,7 +593,7 @@ class Analytics:
|
||||
response_text = await response.text()
|
||||
LOGGER.warning(
|
||||
"Snapshot analytics service %s unavailable: %s",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
url,
|
||||
response_text,
|
||||
)
|
||||
|
||||
@@ -587,18 +601,18 @@ class Analytics:
|
||||
LOGGER.warning(
|
||||
"Unexpected status code %s when submitting snapshot analytics to %s",
|
||||
response.status,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
url,
|
||||
)
|
||||
|
||||
except TimeoutError:
|
||||
LOGGER.error(
|
||||
"Timeout sending snapshot analytics to %s",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
url,
|
||||
)
|
||||
except aiohttp.ClientError as err:
|
||||
LOGGER.error(
|
||||
"Error sending snapshot analytics to %s: %r",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
url,
|
||||
err,
|
||||
)
|
||||
|
||||
@@ -622,7 +636,7 @@ class Analytics:
|
||||
elif self._basic_scheduled is None:
|
||||
# Wait 15 min after started for basic analytics
|
||||
self._basic_scheduled = async_call_later(
|
||||
self.hass,
|
||||
self._hass,
|
||||
900,
|
||||
HassJob(
|
||||
self._async_schedule_basic,
|
||||
@@ -631,10 +645,7 @@ class Analytics:
|
||||
),
|
||||
)
|
||||
|
||||
if not self.preferences.get(ATTR_SNAPSHOTS, False) or RELEASE_CHANNEL not in (
|
||||
ReleaseChannel.DEV,
|
||||
ReleaseChannel.NIGHTLY,
|
||||
):
|
||||
if not self.preferences.get(ATTR_SNAPSHOTS, False) or self._disable_snapshots:
|
||||
LOGGER.debug("Snapshot analytics not scheduled")
|
||||
if self._snapshot_scheduled:
|
||||
self._snapshot_scheduled()
|
||||
@@ -642,9 +653,11 @@ class Analytics:
|
||||
elif self._snapshot_scheduled is None:
|
||||
snapshot_submission_time = self._data.snapshot_submission_time
|
||||
|
||||
interval_seconds = INTERVAL.total_seconds()
|
||||
|
||||
if snapshot_submission_time is None:
|
||||
# Randomize the submission time within the 24 hours
|
||||
snapshot_submission_time = random.uniform(0, 86400)
|
||||
snapshot_submission_time = random.uniform(0, interval_seconds)
|
||||
self._data.snapshot_submission_time = snapshot_submission_time
|
||||
await self._save()
|
||||
LOGGER.debug(
|
||||
@@ -654,10 +667,10 @@ class Analytics:
|
||||
|
||||
# Calculate delay until next submission
|
||||
current_time = time.time()
|
||||
delay = (snapshot_submission_time - current_time) % 86400
|
||||
delay = (snapshot_submission_time - current_time) % interval_seconds
|
||||
|
||||
self._snapshot_scheduled = async_call_later(
|
||||
self.hass,
|
||||
self._hass,
|
||||
delay,
|
||||
HassJob(
|
||||
self._async_schedule_snapshots,
|
||||
@@ -672,7 +685,7 @@ class Analytics:
|
||||
|
||||
# Send basic analytics every day
|
||||
self._basic_scheduled = async_track_time_interval(
|
||||
self.hass,
|
||||
self._hass,
|
||||
self.send_analytics,
|
||||
INTERVAL,
|
||||
name="basic analytics daily",
|
||||
@@ -685,7 +698,7 @@ class Analytics:
|
||||
|
||||
# Send snapshot analytics every day
|
||||
self._snapshot_scheduled = async_track_time_interval(
|
||||
self.hass,
|
||||
self._hass,
|
||||
self.send_snapshot,
|
||||
INTERVAL,
|
||||
name="snapshot analytics daily",
|
||||
|
||||
@@ -5,15 +5,17 @@ import logging

import voluptuous as vol

ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
SNAPSHOT_VERSION = "1"
ANALYTICS_SNAPSHOT_ENDPOINT_URL = f"https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/{SNAPSHOT_VERSION}"
DOMAIN = "analytics"
INTERVAL = timedelta(days=1)
STORAGE_KEY = "core.analytics"
STORAGE_VERSION = 1

BASIC_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
BASIC_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"

SNAPSHOT_VERSION = 1
SNAPSHOT_DEFAULT_URL = "https://device-database.eco-dev-aws.openhomefoundation.com"
SNAPSHOT_URL_PATH = f"/api/v1/snapshot/{SNAPSHOT_VERSION}"

LOGGER: logging.Logger = logging.getLogger(__package__)

@@ -6,7 +6,7 @@
  "documentation": "https://www.home-assistant.io/integrations/awair",
  "iot_class": "local_polling",
  "loggers": ["python_awair"],
  "requirements": ["python-awair==0.2.4"],
  "requirements": ["python-awair==0.2.5"],
  "zeroconf": [
    {
      "name": "awair*",

@@ -21,10 +21,10 @@ from .const import (
|
||||
ATTR_ITEM_NUMBER,
|
||||
ATTR_SERIAL_NUMBER,
|
||||
ATTR_TYPE_NUMBER,
|
||||
COMPATIBLE_MODELS,
|
||||
CONF_SERIAL_NUMBER,
|
||||
DEFAULT_MODEL,
|
||||
DOMAIN,
|
||||
SELECTABLE_MODELS,
|
||||
)
|
||||
from .util import get_serial_number_from_jid
|
||||
|
||||
@@ -70,7 +70,7 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_MODEL, default=DEFAULT_MODEL): SelectSelector(
|
||||
SelectSelectorConfig(options=COMPATIBLE_MODELS)
|
||||
SelectSelectorConfig(options=SELECTABLE_MODELS)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -62,6 +62,7 @@ class BangOlufsenMediaType(StrEnum):
|
||||
class BangOlufsenModel(StrEnum):
|
||||
"""Enum for compatible model names."""
|
||||
|
||||
# Mozart devices
|
||||
BEOCONNECT_CORE = "Beoconnect Core"
|
||||
BEOLAB_8 = "BeoLab 8"
|
||||
BEOLAB_28 = "BeoLab 28"
|
||||
@@ -71,7 +72,26 @@ class BangOlufsenModel(StrEnum):
|
||||
BEOSOUND_BALANCE = "Beosound Balance"
|
||||
BEOSOUND_EMERGE = "Beosound Emerge"
|
||||
BEOSOUND_LEVEL = "Beosound Level"
|
||||
BEOSOUND_PREMIERE = "Beosound Premiere"
|
||||
BEOSOUND_THEATRE = "Beosound Theatre"
|
||||
# Remote devices
|
||||
BEOREMOTE_ONE = "Beoremote One"
|
||||
|
||||
|
||||
# Physical "buttons" on devices
|
||||
class BangOlufsenButtons(StrEnum):
|
||||
"""Enum for device buttons."""
|
||||
|
||||
BLUETOOTH = "Bluetooth"
|
||||
MICROPHONE = "Microphone"
|
||||
NEXT = "Next"
|
||||
PLAY_PAUSE = "PlayPause"
|
||||
PRESET_1 = "Preset1"
|
||||
PRESET_2 = "Preset2"
|
||||
PRESET_3 = "Preset3"
|
||||
PRESET_4 = "Preset4"
|
||||
PREVIOUS = "Previous"
|
||||
VOLUME = "Volume"
|
||||
|
||||
|
||||
# Dispatcher events
|
||||
@@ -79,6 +99,7 @@ class WebsocketNotification(StrEnum):
|
||||
"""Enum for WebSocket notification types."""
|
||||
|
||||
ACTIVE_LISTENING_MODE = "active_listening_mode"
|
||||
BEO_REMOTE_BUTTON = "beo_remote_button"
|
||||
BUTTON = "button"
|
||||
PLAYBACK_ERROR = "playback_error"
|
||||
PLAYBACK_METADATA = "playback_metadata"
|
||||
@@ -96,6 +117,7 @@ class WebsocketNotification(StrEnum):
|
||||
BEOLINK_AVAILABLE_LISTENERS = "beolinkAvailableListeners"
|
||||
CONFIGURATION = "configuration"
|
||||
NOTIFICATION = "notification"
|
||||
REMOTE_CONTROL_DEVICES = "remoteControlDevices"
|
||||
REMOTE_MENU_CHANGED = "remoteMenuChanged"
|
||||
|
||||
ALL = "all"
|
||||
@@ -111,7 +133,11 @@ CONF_SERIAL_NUMBER: Final = "serial_number"
|
||||
CONF_BEOLINK_JID: Final = "jid"
|
||||
|
||||
# Models to choose from in manual configuration.
|
||||
COMPATIBLE_MODELS: list[str] = [x.value for x in BangOlufsenModel]
|
||||
SELECTABLE_MODELS: list[str] = [
|
||||
model.value for model in BangOlufsenModel if model != BangOlufsenModel.BEOREMOTE_ONE
|
||||
]
|
||||
|
||||
MANUFACTURER: Final[str] = "Bang & Olufsen"
|
||||
|
||||
# Attribute names for zeroconf discovery.
|
||||
ATTR_TYPE_NUMBER: Final[str] = "tn"
|
||||
@@ -204,29 +230,16 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
),
|
||||
]
|
||||
)
|
||||
# Map for storing compatibility of devices.
|
||||
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"
|
||||
|
||||
MODEL_SUPPORT_MAP = {
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: (
|
||||
BangOlufsenModel.BEOLAB_8,
|
||||
BangOlufsenModel.BEOLAB_28,
|
||||
BangOlufsenModel.BEOSOUND_2,
|
||||
BangOlufsenModel.BEOSOUND_A5,
|
||||
BangOlufsenModel.BEOSOUND_A9,
|
||||
BangOlufsenModel.BEOSOUND_BALANCE,
|
||||
BangOlufsenModel.BEOSOUND_EMERGE,
|
||||
BangOlufsenModel.BEOSOUND_LEVEL,
|
||||
BangOlufsenModel.BEOSOUND_THEATRE,
|
||||
)
|
||||
}
|
||||
|
||||
# Device events
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"
|
||||
|
||||
# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
|
||||
EVENT_TRANSLATION_MAP: dict[str, str] = {
|
||||
# Beoremote One
|
||||
"KeyPress": "key_press",
|
||||
"KeyRelease": "key_release",
|
||||
# Physical "buttons"
|
||||
"shortPress (Release)": "short_press_release",
|
||||
"longPress (Timeout)": "long_press_timeout",
|
||||
"longPress (Release)": "long_press_release",
|
||||
@@ -236,18 +249,7 @@ EVENT_TRANSLATION_MAP: dict[str, str] = {
|
||||
|
||||
CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"
|
||||
|
||||
DEVICE_BUTTONS: Final[list[str]] = [
|
||||
"Bluetooth",
|
||||
"Microphone",
|
||||
"Next",
|
||||
"PlayPause",
|
||||
"Preset1",
|
||||
"Preset2",
|
||||
"Preset3",
|
||||
"Preset4",
|
||||
"Previous",
|
||||
"Volume",
|
||||
]
|
||||
DEVICE_BUTTONS: Final[list[str]] = [x.value for x in BangOlufsenButtons]
|
||||
|
||||
|
||||
DEVICE_BUTTON_EVENTS: Final[list[str]] = [
|
||||
@@ -258,6 +260,70 @@ DEVICE_BUTTON_EVENTS: Final[list[str]] = [
|
||||
"very_long_press_release",
|
||||
]
|
||||
|
||||
BEO_REMOTE_SUBMENU_CONTROL: Final[str] = "Control"
|
||||
BEO_REMOTE_SUBMENU_LIGHT: Final[str] = "Light"
|
||||
|
||||
# Common for both submenus
|
||||
BEO_REMOTE_KEYS: Final[tuple[str, ...]] = (
|
||||
"Blue",
|
||||
"Digit0",
|
||||
"Digit1",
|
||||
"Digit2",
|
||||
"Digit3",
|
||||
"Digit4",
|
||||
"Digit5",
|
||||
"Digit6",
|
||||
"Digit7",
|
||||
"Digit8",
|
||||
"Digit9",
|
||||
"Down",
|
||||
"Green",
|
||||
"Left",
|
||||
"Play",
|
||||
"Red",
|
||||
"Rewind",
|
||||
"Right",
|
||||
"Select",
|
||||
"Stop",
|
||||
"Up",
|
||||
"Wind",
|
||||
"Yellow",
|
||||
"Func1",
|
||||
"Func2",
|
||||
"Func3",
|
||||
"Func4",
|
||||
"Func5",
|
||||
"Func6",
|
||||
"Func7",
|
||||
"Func8",
|
||||
"Func9",
|
||||
"Func10",
|
||||
"Func11",
|
||||
"Func12",
|
||||
"Func13",
|
||||
"Func14",
|
||||
"Func15",
|
||||
"Func16",
|
||||
"Func17",
|
||||
)
|
||||
|
||||
# "keys" that are unique to the Control submenu
|
||||
BEO_REMOTE_CONTROL_KEYS: Final[tuple[str, ...]] = (
|
||||
"Func18",
|
||||
"Func19",
|
||||
"Func20",
|
||||
"Func21",
|
||||
"Func22",
|
||||
"Func23",
|
||||
"Func24",
|
||||
"Func25",
|
||||
"Func26",
|
||||
"Func27",
|
||||
)
|
||||
|
||||
BEO_REMOTE_KEY_EVENTS: Final[list[str]] = ["key_press", "key_release"]
|
||||
|
||||
|
||||
# Beolink Converter NL/ML sources need to be transformed to upper case
|
||||
BEOLINK_JOIN_SOURCES_TO_UPPER = (
|
||||
"aux_a",
|
||||
|
||||
@@ -6,11 +6,13 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
|
||||
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import DEVICE_BUTTONS, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .util import get_device_buttons
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -40,7 +42,7 @@ async def async_get_config_entry_diagnostics(
|
||||
data["media_player"] = state_dict
|
||||
|
||||
# Add button Event entity states (if enabled)
|
||||
for device_button in DEVICE_BUTTONS:
|
||||
for device_button in get_device_buttons(config_entry.data[CONF_MODEL]):
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
EVENT_DOMAIN, DOMAIN, f"{config_entry.unique_id}_{device_button}"
|
||||
):
|
||||
|
||||
@@ -2,22 +2,34 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import PairedRemote
|
||||
|
||||
from homeassistant.components.event import EventDeviceClass, EventEntity
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import (
|
||||
BEO_REMOTE_CONTROL_KEYS,
|
||||
BEO_REMOTE_KEY_EVENTS,
|
||||
BEO_REMOTE_KEYS,
|
||||
BEO_REMOTE_SUBMENU_CONTROL,
|
||||
BEO_REMOTE_SUBMENU_LIGHT,
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_MAP,
|
||||
DOMAIN,
|
||||
MANUFACTURER,
|
||||
BangOlufsenModel,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenEntity
|
||||
from .util import get_device_buttons, get_remotes
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -27,25 +39,87 @@ async def async_setup_entry(
|
||||
config_entry: BangOlufsenConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Sensor entities from config entry."""
|
||||
"""Set up Event entities from config entry."""
|
||||
entities: list[BangOlufsenEvent] = []
|
||||
|
||||
if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
|
||||
async_add_entities(
|
||||
BangOlufsenButtonEvent(config_entry, button_type)
|
||||
for button_type in DEVICE_BUTTONS
|
||||
async_add_entities(
|
||||
BangOlufsenButtonEvent(config_entry, button_type)
|
||||
for button_type in get_device_buttons(config_entry.data[CONF_MODEL])
|
||||
)
|
||||
|
||||
# Check for connected Beoremote One
|
||||
remotes = await get_remotes(config_entry.runtime_data.client)
|
||||
|
||||
for remote in remotes:
|
||||
# Add Light keys
|
||||
entities.extend(
|
||||
[
|
||||
BangOlufsenRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}",
|
||||
)
|
||||
for key_type in BEO_REMOTE_KEYS
|
||||
]
|
||||
)
|
||||
|
||||
# Add Control keys
|
||||
entities.extend(
|
||||
[
|
||||
BangOlufsenRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}",
|
||||
)
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
]
|
||||
)
|
||||
|
||||
class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
|
||||
"""Event class for Button events."""
|
||||
# If the remote is no longer available, delete the device.
# The remote may still appear as available after it has been unpaired on the remote itself,
# as it also has to be removed from the device via the app.
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
devices = device_registry.devices.get_devices_for_config_entry_id(
|
||||
config_entry.entry_id
|
||||
)
|
||||
for device in devices:
|
||||
if (
|
||||
device.model == BangOlufsenModel.BEOREMOTE_ONE
|
||||
and device.serial_number not in {remote.serial_number for remote in remotes}
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device.id, remove_config_entry_id=config_entry.entry_id
|
||||
)
|
||||
|
||||
async_add_entities(new_entities=entities)
|
||||
|
||||
|
||||
class BangOlufsenEvent(BangOlufsenEntity, EventEntity):
|
||||
"""Base Event class."""
|
||||
|
||||
_attr_device_class = EventDeviceClass.BUTTON
|
||||
_attr_entity_registry_enabled_default = False
|
||||
|
||||
def __init__(self, config_entry: BangOlufsenConfigEntry) -> None:
|
||||
"""Initialize Event."""
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
|
||||
@callback
|
||||
def _async_handle_event(self, event: str) -> None:
|
||||
"""Handle event."""
|
||||
self._trigger_event(event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class BangOlufsenButtonEvent(BangOlufsenEvent):
|
||||
"""Event class for Button events."""
|
||||
|
||||
_attr_event_types = DEVICE_BUTTON_EVENTS
|
||||
|
||||
def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
|
||||
"""Initialize Button."""
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
super().__init__(config_entry)
|
||||
|
||||
self._attr_unique_id = f"{self._unique_id}_{button_type}"
|
||||
|
||||
@@ -59,20 +133,65 @@ class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{CONNECTION_STATUS}",
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
|
||||
self._async_handle_event,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_event(self, event: str) -> None:
|
||||
"""Handle event."""
|
||||
self._trigger_event(event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
class BangOlufsenRemoteKeyEvent(BangOlufsenEvent):
|
||||
"""Event class for Beoremote One key events."""
|
||||
|
||||
_attr_event_types = BEO_REMOTE_KEY_EVENTS
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: BangOlufsenConfigEntry,
|
||||
remote: PairedRemote,
|
||||
key_type: str,
|
||||
) -> None:
|
||||
"""Initialize Beoremote One key."""
|
||||
super().__init__(config_entry)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert remote.serial_number
|
||||
|
||||
self._attr_unique_id = f"{remote.serial_number}_{self._unique_id}_{key_type}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")},
|
||||
name=f"{BangOlufsenModel.BEOREMOTE_ONE}-{remote.serial_number}-{self._unique_id}",
|
||||
model=BangOlufsenModel.BEOREMOTE_ONE,
|
||||
serial_number=remote.serial_number,
|
||||
sw_version=remote.app_version,
|
||||
manufacturer=MANUFACTURER,
|
||||
via_device=(DOMAIN, self._unique_id),
|
||||
)
|
||||
|
||||
# Make the native key name Home Assistant compatible
|
||||
self._attr_translation_key = key_type.lower().replace("/", "_")
|
||||
|
||||
self._key_type = key_type
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Listen to WebSocket Beoremote One key events."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{self._key_type}",
|
||||
self._async_handle_event,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,4 +1,278 @@
|
||||
{
|
||||
"entity": {
|
||||
"event": {
|
||||
"control_blue": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit0": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_down": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func10": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func11": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func12": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func13": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func14": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func15": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func16": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func17": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func18": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func19": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func20": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func21": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func22": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func23": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func24": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func25": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func26": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func27": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_green": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_left": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_play": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_red": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_rewind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_right": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_select": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_stop": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_up": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_wind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_yellow": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_blue": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit0": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_down": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func10": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func11": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func12": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func13": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func14": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func15": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func16": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func17": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_green": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_left": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_play": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_red": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_rewind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_right": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_select": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_stop": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_up": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_wind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_yellow": {
|
||||
"default": "mdi:remote"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" },
|
||||
"beolink_expand": { "service": "mdi:location-enter" },
|
||||
|
||||
@@ -80,6 +80,7 @@ from .const import (
|
||||
CONNECTION_STATUS,
|
||||
DOMAIN,
|
||||
FALLBACK_SOURCES,
|
||||
MANUFACTURER,
|
||||
VALID_MEDIA_TYPES,
|
||||
BangOlufsenMediaType,
|
||||
BangOlufsenSource,
|
||||
@@ -201,7 +202,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{self._host}/#/",
|
||||
identifiers={(DOMAIN, self._unique_id)},
|
||||
manufacturer="Bang & Olufsen",
|
||||
manufacturer=MANUFACTURER,
|
||||
model=self._model,
|
||||
serial_number=self._unique_id,
|
||||
)
|
||||
@@ -249,7 +250,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{signal}",
|
||||
f"{DOMAIN}_{self._unique_id}_{signal}",
|
||||
signal_handler,
|
||||
)
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
@@ -2,11 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
||||
from mozart_api.models import PairedRemote
|
||||
from mozart_api.mozart_client import MozartClient
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DEVICE_BUTTONS, DOMAIN, BangOlufsenButtons, BangOlufsenModel
|
||||
|
||||
|
||||
def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
@@ -21,3 +26,30 @@ def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
def get_serial_number_from_jid(jid: str) -> str:
|
||||
"""Get serial number from Beolink JID."""
|
||||
return jid.split(".")[2].split("@")[0]
|
||||
|
||||
|
||||
async def get_remotes(client: MozartClient) -> list[PairedRemote]:
|
||||
"""Get paired remotes."""
|
||||
|
||||
bluetooth_remote_list = await client.get_bluetooth_remotes()
|
||||
|
||||
return [
|
||||
remote
|
||||
for remote in cast(list[PairedRemote], bluetooth_remote_list.items)
|
||||
if remote.serial_number is not None
|
||||
]
|
||||
|
||||
|
||||
def get_device_buttons(model: BangOlufsenModel) -> list[str]:
|
||||
"""Get supported buttons for a given model."""
|
||||
buttons = DEVICE_BUTTONS.copy()
|
||||
|
||||
# Beosound Premiere does not have a bluetooth button
|
||||
if model == BangOlufsenModel.BEOSOUND_PREMIERE:
|
||||
buttons.remove(BangOlufsenButtons.BLUETOOTH)
|
||||
|
||||
# Beoconnect Core does not have any buttons
|
||||
elif model == BangOlufsenModel.BEOCONNECT_CORE:
|
||||
buttons = []
|
||||
|
||||
return buttons
|
||||
|
||||
@@ -6,6 +6,7 @@ import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import (
|
||||
BeoRemoteButton,
|
||||
ButtonEvent,
|
||||
ListeningModeProps,
|
||||
PlaybackContentMetadata,
|
||||
@@ -28,11 +29,13 @@ from homeassistant.util.enum import try_parse_enum
|
||||
from .const import (
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT,
|
||||
CONNECTION_STATUS,
|
||||
DOMAIN,
|
||||
EVENT_TRANSLATION_MAP,
|
||||
BangOlufsenModel,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenBase
|
||||
from .util import get_device
|
||||
from .util import get_device, get_remotes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -57,6 +60,9 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self._client.get_active_listening_mode_notifications(
|
||||
self.on_active_listening_mode
|
||||
)
|
||||
self._client.get_beo_remote_button_notifications(
|
||||
self.on_beo_remote_button_notification
|
||||
)
|
||||
self._client.get_button_notifications(self.on_button_notification)
|
||||
|
||||
self._client.get_playback_error_notifications(
|
||||
@@ -87,7 +93,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Update all entities of the connection status."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{CONNECTION_STATUS}",
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._client.websocket_connected,
|
||||
)
|
||||
|
||||
@@ -105,10 +111,22 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send active_listening_mode dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_beo_remote_button_notification(self, notification: BeoRemoteButton) -> None:
|
||||
"""Send beo_remote_button dispatch."""
|
||||
if TYPE_CHECKING:
|
||||
assert notification.type
|
||||
|
||||
# Send to event entity
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{notification.key}",
|
||||
EVENT_TRANSLATION_MAP[notification.type],
|
||||
)
|
||||
|
||||
def on_button_notification(self, notification: ButtonEvent) -> None:
|
||||
"""Send button dispatch."""
|
||||
# State is expected to always be available.
|
||||
@@ -118,11 +136,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
# Send to event entity
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
|
||||
EVENT_TRANSLATION_MAP[notification.state],
|
||||
)
|
||||
|
||||
def on_notification_notification(
|
||||
async def on_notification_notification(
|
||||
self, notification: WebsocketNotificationTag
|
||||
) -> None:
|
||||
"""Send notification dispatch."""
|
||||
@@ -136,24 +154,51 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
):
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEOLINK}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.CONFIGURATION:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
)
|
||||
|
||||
# This notification is triggered by a remote pairing, unpairing or connecting to a device,
# so the current remote devices have to be compared to the available remotes to determine the action
|
||||
elif notification_type is WebsocketNotification.REMOTE_CONTROL_DEVICES:
|
||||
device_registry = dr.async_get(self.hass)
|
||||
# Get remote devices connected to the device from Home Assistant
|
||||
device_serial_numbers = [
|
||||
device.serial_number
|
||||
for device in device_registry.devices.get_devices_for_config_entry_id(
|
||||
self.entry.entry_id
|
||||
)
|
||||
if device.serial_number is not None
|
||||
and device.model == BangOlufsenModel.BEOREMOTE_ONE
|
||||
]
|
||||
# Get paired remotes from device
|
||||
remote_serial_numbers = [
|
||||
remote.serial_number
|
||||
for remote in await get_remotes(self._client)
|
||||
if remote.serial_number is not None
|
||||
]
|
||||
# Check if the number of remote devices corresponds to the number of paired remotes
|
||||
if len(remote_serial_numbers) != len(device_serial_numbers):
|
||||
_LOGGER.info(
|
||||
"A Beoremote One has been paired or unpaired to %s. Reloading config entry to add device and entities",
|
||||
self.entry.title,
|
||||
)
|
||||
self.hass.config_entries.async_schedule_reload(self.entry.entry_id)
|
||||
|
||||
def on_playback_error_notification(self, notification: PlaybackError) -> None:
|
||||
"""Send playback_error dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -163,7 +208,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_metadata dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -171,7 +216,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_progress dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -179,7 +224,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_state dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -187,7 +232,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_source dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -195,7 +240,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send source_change dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -203,7 +248,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send volume dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.VOLUME}",
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.VOLUME}",
|
||||
notification,
|
||||
)
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@
    "bluetooth-adapters==2.1.0",
    "bluetooth-auto-recovery==1.5.3",
    "bluetooth-data-tools==1.28.4",
    "dbus-fast==3.0.0",
    "dbus-fast==3.1.2",
    "habluetooth==5.7.0"
  ]
}

@@ -24,7 +24,7 @@ class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
|
||||
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
|
||||
serial_number=coordinator.brother.serial,
|
||||
manufacturer="Brother",
|
||||
model=coordinator.brother.model,
|
||||
model_id=coordinator.brother.model,
|
||||
name=coordinator.brother.model,
|
||||
sw_version=coordinator.brother.firmware,
|
||||
)
|
||||
|
||||
@@ -8,7 +8,8 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
  "requirements": ["brother==5.1.1"],
  "quality_scale": "platinum",
  "requirements": ["brother==6.0.0"],
  "zeroconf": [
    {
      "name": "brother*",

homeassistant/components/brother/quality_scale.yaml (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options to configure.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: SNMP doesn't return an error identifying an authentication problem; to change the SNMP community (simple password) the user should use the reconfigure flow.
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: This integration has a fixed single device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: This integration has a fixed single device.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: The integration does not connect via HTTP; instead it uses a shared SNMP engine.
|
||||
strict-typing: done
|
||||
@@ -17,7 +17,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
@@ -345,12 +345,10 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_native_value = description.value(coordinator.data)
|
||||
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
|
||||
self.entity_description = description
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._attr_native_value = self.entity_description.value(self.coordinator.data)
|
||||
self.async_write_ha_state()
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the native value of the sensor."""
|
||||
return self.entity_description.value(self.coordinator.data)
|
||||
|
||||
@@ -7,7 +7,7 @@ from collections.abc import Awaitable, Callable
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
import logging
|
||||
from typing import cast
|
||||
from typing import Any, cast
|
||||
|
||||
from hass_nabucasa import Cloud
|
||||
import voluptuous as vol
|
||||
@@ -86,6 +86,10 @@ SIGNAL_CLOUD_CONNECTION_STATE: SignalType[CloudConnectionState] = SignalType(
|
||||
"CLOUD_CONNECTION_STATE"
|
||||
)
|
||||
|
||||
_SIGNAL_CLOUDHOOKS_UPDATED: SignalType[dict[str, Any]] = SignalType(
|
||||
"CLOUDHOOKS_UPDATED"
|
||||
)
|
||||
|
||||
STARTUP_REPAIR_DELAY = 1 # 1 hour
|
||||
|
||||
ALEXA_ENTITY_SCHEMA = vol.Schema(
|
||||
@@ -242,6 +246,24 @@ async def async_delete_cloudhook(hass: HomeAssistant, webhook_id: str) -> None:
|
||||
await hass.data[DATA_CLOUD].cloudhooks.async_delete(webhook_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen_cloudhook_change(
|
||||
hass: HomeAssistant,
|
||||
webhook_id: str,
|
||||
on_change: Callable[[dict[str, Any] | None], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for cloudhook changes for the given webhook and notify when modified or deleted."""
|
||||
|
||||
@callback
|
||||
def _handle_cloudhooks_updated(cloudhooks: dict[str, Any]) -> None:
|
||||
"""Handle cloudhooks updated signal."""
|
||||
on_change(cloudhooks.get(webhook_id))
|
||||
|
||||
return async_dispatcher_connect(
|
||||
hass, _SIGNAL_CLOUDHOOKS_UPDATED, _handle_cloudhooks_updated
|
||||
)
|
||||
|
||||
|
||||
@bind_hass
|
||||
@callback
|
||||
def async_remote_ui_url(hass: HomeAssistant) -> str:
|
||||
@@ -289,7 +311,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
|
||||
|
||||
_remote_handle_prefs_updated(cloud)
|
||||
_handle_prefs_updated(hass, cloud)
|
||||
_setup_services(hass, prefs)
|
||||
|
||||
async def async_startup_repairs(_: datetime) -> None:
|
||||
@@ -373,26 +395,32 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
|
||||
@callback
|
||||
def _remote_handle_prefs_updated(cloud: Cloud[CloudClient]) -> None:
|
||||
"""Handle remote preferences updated."""
|
||||
cur_pref = cloud.client.prefs.remote_enabled
|
||||
def _handle_prefs_updated(hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None:
|
||||
"""Register handler for cloud preferences updates."""
|
||||
cur_remote_enabled = cloud.client.prefs.remote_enabled
|
||||
cur_cloudhooks = cloud.client.prefs.cloudhooks
|
||||
lock = asyncio.Lock()
|
||||
|
||||
# Sync remote connection with prefs
|
||||
async def remote_prefs_updated(prefs: CloudPreferences) -> None:
|
||||
"""Update remote status."""
|
||||
nonlocal cur_pref
|
||||
async def on_prefs_updated(prefs: CloudPreferences) -> None:
|
||||
"""Handle cloud preferences updates."""
|
||||
nonlocal cur_remote_enabled
|
||||
nonlocal cur_cloudhooks
|
||||
|
||||
# Lock protects cur_ state variables from concurrent updates
|
||||
async with lock:
|
||||
if prefs.remote_enabled == cur_pref:
|
||||
if cur_cloudhooks != prefs.cloudhooks:
|
||||
cur_cloudhooks = prefs.cloudhooks
|
||||
async_dispatcher_send(hass, _SIGNAL_CLOUDHOOKS_UPDATED, cur_cloudhooks)
|
||||
|
||||
if prefs.remote_enabled == cur_remote_enabled:
|
||||
return
|
||||
|
||||
if cur_pref := prefs.remote_enabled:
|
||||
if cur_remote_enabled := prefs.remote_enabled:
|
||||
await cloud.remote.connect()
|
||||
else:
|
||||
await cloud.remote.disconnect()
|
||||
|
||||
cloud.client.prefs.async_listen_updates(remote_prefs_updated)
|
||||
cloud.client.prefs.async_listen_updates(on_prefs_updated)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.5.1"],
|
||||
"requirements": ["hass-nabucasa==1.6.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ def async_setup(hass: HomeAssistant) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_create_area)
|
||||
websocket_api.async_register_command(hass, websocket_delete_area)
|
||||
websocket_api.async_register_command(hass, websocket_update_area)
|
||||
websocket_api.async_register_command(hass, websocket_reorder_areas)
|
||||
return True
|
||||
|
||||
|
||||
@@ -145,3 +146,27 @@ def websocket_update_area(
|
||||
connection.send_error(msg["id"], "invalid_info", str(err))
|
||||
else:
|
||||
connection.send_result(msg["id"], entry.json_fragment)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "config/area_registry/reorder",
|
||||
vol.Required("area_ids"): [str],
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@callback
|
||||
def websocket_reorder_areas(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Handle reorder areas websocket command."""
|
||||
registry = ar.async_get(hass)
|
||||
|
||||
try:
|
||||
registry.async_reorder(msg["area_ids"])
|
||||
except ValueError as err:
|
||||
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
|
||||
else:
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
@@ -18,6 +18,7 @@ def async_setup(hass: HomeAssistant) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_create_floor)
|
||||
websocket_api.async_register_command(hass, websocket_delete_floor)
|
||||
websocket_api.async_register_command(hass, websocket_update_floor)
|
||||
websocket_api.async_register_command(hass, websocket_reorder_floors)
|
||||
return True
|
||||
|
||||
|
||||
@@ -127,6 +128,28 @@ def websocket_update_floor(
|
||||
connection.send_result(msg["id"], _entry_dict(entry))
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "config/floor_registry/reorder",
|
||||
vol.Required("floor_ids"): [str],
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@callback
|
||||
def websocket_reorder_floors(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle reorder floors websocket command."""
|
||||
registry = fr.async_get(hass)
|
||||
|
||||
try:
|
||||
registry.async_reorder(msg["floor_ids"])
|
||||
except ValueError as err:
|
||||
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
|
||||
else:
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@callback
|
||||
def _entry_dict(entry: FloorEntry) -> dict[str, Any]:
|
||||
"""Convert entry to API format."""
|
||||
|
||||
@@ -7,6 +7,7 @@ from collections.abc import AsyncGenerator, AsyncIterable, Callable, Generator
|
||||
from contextlib import contextmanager
|
||||
from contextvars import ContextVar
|
||||
from dataclasses import asdict, dataclass, field, replace
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, Literal, TypedDict, cast
|
||||
@@ -16,14 +17,18 @@ import voluptuous as vol
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers import chat_session, frame, intent, llm, template
|
||||
from homeassistant.util.dt import utcnow
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
|
||||
from . import trace
|
||||
from .const import ChatLogEventType
|
||||
from .models import ConversationInput, ConversationResult
|
||||
|
||||
DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
|
||||
|
||||
DATA_SUBSCRIPTIONS: HassKey[
|
||||
list[Callable[[str, ChatLogEventType, dict[str, Any]], None]]
|
||||
] = HassKey("conversation_chat_log_subscriptions")
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
current_chat_log: ContextVar[ChatLog | None] = ContextVar(
|
||||
@@ -31,6 +36,40 @@ current_chat_log: ContextVar[ChatLog | None] = ContextVar(
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_subscribe_chat_logs(
|
||||
hass: HomeAssistant,
|
||||
callback_func: Callable[[str, ChatLogEventType, dict[str, Any]], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to all chat logs."""
|
||||
subscriptions = hass.data.get(DATA_SUBSCRIPTIONS)
|
||||
if subscriptions is None:
|
||||
subscriptions = []
|
||||
hass.data[DATA_SUBSCRIPTIONS] = subscriptions
|
||||
|
||||
subscriptions.append(callback_func)
|
||||
|
||||
@callback
|
||||
def unsubscribe() -> None:
|
||||
"""Unsubscribe from chat logs."""
|
||||
subscriptions.remove(callback_func)
|
||||
|
||||
return unsubscribe
|
||||
|
||||
|
||||
@callback
|
||||
def _async_notify_subscribers(
|
||||
hass: HomeAssistant,
|
||||
conversation_id: str,
|
||||
event_type: ChatLogEventType,
|
||||
data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Notify subscribers of a chat log event."""
|
||||
if subscriptions := hass.data.get(DATA_SUBSCRIPTIONS):
|
||||
for callback_func in subscriptions:
|
||||
callback_func(conversation_id, event_type, data)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def async_get_chat_log(
|
||||
hass: HomeAssistant,
|
||||
@@ -63,6 +102,8 @@ def async_get_chat_log(
|
||||
all_chat_logs = {}
|
||||
hass.data[DATA_CHAT_LOGS] = all_chat_logs
|
||||
|
||||
is_new_log = session.conversation_id not in all_chat_logs
|
||||
|
||||
if chat_log := all_chat_logs.get(session.conversation_id):
|
||||
chat_log = replace(chat_log, content=chat_log.content.copy())
|
||||
else:
|
||||
@@ -71,6 +112,15 @@ def async_get_chat_log(
|
||||
if chat_log_delta_listener:
|
||||
chat_log.delta_listener = chat_log_delta_listener
|
||||
|
||||
# Fire CREATED event for new chat logs before any content is added
|
||||
if is_new_log:
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.CREATED,
|
||||
{"chat_log": chat_log.as_dict()},
|
||||
)
|
||||
|
||||
if user_input is not None:
|
||||
chat_log.async_add_user_content(UserContent(content=user_input.text))
|
||||
|
||||
@@ -84,14 +134,28 @@ def async_get_chat_log(
|
||||
LOGGER.debug(
|
||||
"Chat Log opened but no assistant message was added, ignoring update"
|
||||
)
|
||||
# If this was a new log but nothing was added, fire DELETED to clean up
|
||||
if is_new_log:
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.DELETED,
|
||||
{},
|
||||
)
|
||||
return
|
||||
|
||||
if session.conversation_id not in all_chat_logs:
|
||||
if is_new_log:
|
||||
|
||||
@callback
|
||||
def do_cleanup() -> None:
|
||||
"""Handle cleanup."""
|
||||
all_chat_logs.pop(session.conversation_id)
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.DELETED,
|
||||
{},
|
||||
)
|
||||
|
||||
session.async_on_cleanup(do_cleanup)
|
||||
|
||||
@@ -100,6 +164,16 @@ def async_get_chat_log(
|
||||
|
||||
all_chat_logs[session.conversation_id] = chat_log
|
||||
|
||||
# For new logs, CREATED was already fired before content was added
|
||||
# For existing logs, fire UPDATED
|
||||
if not is_new_log:
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.UPDATED,
|
||||
{"chat_log": chat_log.as_dict()},
|
||||
)
|
||||
|
||||
|
||||
class ConverseError(HomeAssistantError):
|
||||
"""Error during initialization of conversation.
|
||||
@@ -129,6 +203,15 @@ class SystemContent:
|
||||
|
||||
role: Literal["system"] = field(init=False, default="system")
|
||||
content: str
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
return {
|
||||
"role": self.role,
|
||||
"content": self.content,
|
||||
"created": self.created,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -138,6 +221,20 @@ class UserContent:
|
||||
role: Literal["user"] = field(init=False, default="user")
|
||||
content: str
|
||||
attachments: list[Attachment] | None = field(default=None)
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
result: dict[str, Any] = {
|
||||
"role": self.role,
|
||||
"content": self.content,
|
||||
"created": self.created,
|
||||
}
|
||||
if self.attachments:
|
||||
result["attachments"] = [
|
||||
attachment.as_dict() for attachment in self.attachments
|
||||
]
|
||||
return result
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -153,6 +250,14 @@ class Attachment:
|
||||
path: Path
|
||||
"""Path to the attachment on disk."""
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the attachment."""
|
||||
return {
|
||||
"media_content_id": self.media_content_id,
|
||||
"mime_type": self.mime_type,
|
||||
"path": str(self.path),
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AssistantContent:
|
||||
@@ -164,6 +269,22 @@ class AssistantContent:
|
||||
thinking_content: str | None = None
|
||||
tool_calls: list[llm.ToolInput] | None = None
|
||||
native: Any = None
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
result: dict[str, Any] = {
|
||||
"role": self.role,
|
||||
"agent_id": self.agent_id,
|
||||
"created": self.created,
|
||||
}
|
||||
if self.content:
|
||||
result["content"] = self.content
|
||||
if self.thinking_content:
|
||||
result["thinking_content"] = self.thinking_content
|
||||
if self.tool_calls:
|
||||
result["tool_calls"] = self.tool_calls
|
||||
return result
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -175,6 +296,18 @@ class ToolResultContent:
|
||||
tool_call_id: str
|
||||
tool_name: str
|
||||
tool_result: JsonObjectType
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
return {
|
||||
"role": self.role,
|
||||
"agent_id": self.agent_id,
|
||||
"tool_call_id": self.tool_call_id,
|
||||
"tool_name": self.tool_name,
|
||||
"tool_result": self.tool_result,
|
||||
"created": self.created,
|
||||
}
|
||||
|
||||
|
||||
type Content = SystemContent | UserContent | AssistantContent | ToolResultContent
|
||||
@@ -210,6 +343,16 @@ class ChatLog:
|
||||
llm_api: llm.APIInstance | None = None
|
||||
delta_listener: Callable[[ChatLog, dict], None] | None = None
|
||||
llm_input_provided_index = 0
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the chat log."""
|
||||
return {
|
||||
"conversation_id": self.conversation_id,
|
||||
"continue_conversation": self.continue_conversation,
|
||||
"content": [c.as_dict() for c in self.content],
|
||||
"created": self.created,
|
||||
}
|
||||
|
||||
@property
|
||||
def continue_conversation(self) -> bool:
|
||||
@@ -241,6 +384,12 @@ class ChatLog:
|
||||
"""Add user content to the log."""
|
||||
LOGGER.debug("Adding user content: %s", content)
|
||||
self.content.append(content)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.CONTENT_ADDED,
|
||||
{"content": content.as_dict()},
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_add_assistant_content_without_tools(
|
||||
@@ -259,6 +408,12 @@ class ChatLog:
|
||||
):
|
||||
raise ValueError("Non-external tool calls not allowed")
|
||||
self.content.append(content)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.CONTENT_ADDED,
|
||||
{"content": content.as_dict()},
|
||||
)
|
||||
|
||||
async def async_add_assistant_content(
|
||||
self,
|
||||
@@ -317,6 +472,14 @@ class ChatLog:
|
||||
tool_result=tool_result,
|
||||
)
|
||||
self.content.append(response_content)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.CONTENT_ADDED,
|
||||
{
|
||||
"content": response_content.as_dict(),
|
||||
},
|
||||
)
|
||||
yield response_content
|
||||
|
||||
async def async_add_delta_content_stream(
|
||||
@@ -593,6 +756,12 @@ class ChatLog:
|
||||
self.llm_api = llm_api
|
||||
self.extra_system_prompt = extra_system_prompt
|
||||
self.content[0] = SystemContent(content=prompt)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.UPDATED,
|
||||
{"chat_log": self.as_dict()},
|
||||
)
|
||||
|
||||
LOGGER.debug("Prompt: %s", self.content)
|
||||
LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntFlag
|
||||
from enum import IntFlag, StrEnum
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
@@ -34,3 +34,13 @@ class ConversationEntityFeature(IntFlag):
|
||||
|
||||
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
|
||||
METADATA_CUSTOM_FILE = "hass_custom_file"
|
||||
|
||||
|
||||
class ChatLogEventType(StrEnum):
|
||||
"""Chat log event type."""
|
||||
|
||||
INITIAL_STATE = "initial_state"
|
||||
CREATED = "created"
|
||||
UPDATED = "updated"
|
||||
DELETED = "deleted"
|
||||
CONTENT_ADDED = "content_added"
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.components import http, websocket_api
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.const import MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.chat_session import async_get_chat_session
|
||||
from homeassistant.util import language as language_util
|
||||
|
||||
from .agent_manager import (
|
||||
@@ -20,7 +21,8 @@ from .agent_manager import (
|
||||
async_get_agent,
|
||||
get_agent_manager,
|
||||
)
|
||||
from .const import DATA_COMPONENT
|
||||
from .chat_log import DATA_CHAT_LOGS, async_get_chat_log, async_subscribe_chat_logs
|
||||
from .const import DATA_COMPONENT, ChatLogEventType
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput
|
||||
|
||||
@@ -35,6 +37,8 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_list_sentences)
|
||||
websocket_api.async_register_command(hass, websocket_hass_agent_debug)
|
||||
websocket_api.async_register_command(hass, websocket_hass_agent_language_scores)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_chat_log)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_chat_log_index)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
@@ -265,3 +269,114 @@ class ConversationProcessView(http.HomeAssistantView):
|
||||
)
|
||||
|
||||
return self.json(result.as_dict())
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/chat_log/subscribe",
|
||||
vol.Required("conversation_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
def websocket_subscribe_chat_log(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a chat log."""
|
||||
msg_id = msg["id"]
|
||||
subscribed_conversation = msg["conversation_id"]
|
||||
|
||||
chat_logs = hass.data.get(DATA_CHAT_LOGS)
|
||||
|
||||
if not chat_logs or subscribed_conversation not in chat_logs:
|
||||
connection.send_error(
|
||||
msg_id,
|
||||
websocket_api.ERR_NOT_FOUND,
|
||||
"Conversation chat log not found",
|
||||
)
|
||||
return
|
||||
|
||||
@callback
|
||||
def forward_events(conversation_id: str, event_type: str, data: dict) -> None:
|
||||
"""Forward chat log events to websocket connection."""
|
||||
if conversation_id != subscribed_conversation:
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"conversation_id": conversation_id,
|
||||
"event_type": event_type,
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
if event_type == ChatLogEventType.DELETED:
|
||||
unsubscribe()
|
||||
del connection.subscriptions[msg_id]
|
||||
|
||||
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
|
||||
connection.subscriptions[msg_id] = unsubscribe
|
||||
connection.send_result(msg_id)
|
||||
|
||||
with (
|
||||
async_get_chat_session(hass, subscribed_conversation) as session,
|
||||
async_get_chat_log(hass, session) as chat_log,
|
||||
):
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"event_type": ChatLogEventType.INITIAL_STATE,
|
||||
"data": chat_log.as_dict(),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/chat_log/subscribe_index",
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
def websocket_subscribe_chat_log_index(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a chat log."""
|
||||
msg_id = msg["id"]
|
||||
|
||||
@callback
|
||||
def forward_events(
|
||||
conversation_id: str, event_type: ChatLogEventType, data: dict
|
||||
) -> None:
|
||||
"""Forward chat log events to websocket connection."""
|
||||
if event_type not in (ChatLogEventType.CREATED, ChatLogEventType.DELETED):
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"conversation_id": conversation_id,
|
||||
"event_type": event_type,
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
|
||||
connection.subscriptions[msg["id"]] = unsubscribe
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
chat_logs = hass.data.get(DATA_CHAT_LOGS)
|
||||
|
||||
if not chat_logs:
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"event_type": ChatLogEventType.INITIAL_STATE,
|
||||
"data": [c.as_dict() for c in chat_logs.values()],
|
||||
},
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
|
||||
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.24"]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -25,6 +26,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -167,6 +169,7 @@ class DecoraWifiLight(LightEntity):
|
||||
except ValueError:
|
||||
_LOGGER.error("Failed to turn off myLeviton switch")
|
||||
|
||||
@Throttle(timedelta(seconds=30))
|
||||
def update(self) -> None:
|
||||
"""Fetch new state data for this switch."""
|
||||
try:
|
||||
|
||||
@@ -5,5 +5,10 @@
|
||||
"default": "mdi:chart-line"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"reload": {
|
||||
"service": "mdi:reload"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,8 +20,10 @@ from homeassistant.const import (
|
||||
ATTR_UNIT_OF_MEASUREMENT,
|
||||
CONF_NAME,
|
||||
CONF_SOURCE,
|
||||
CONF_UNIQUE_ID,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
Platform,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
@@ -44,6 +46,7 @@ from homeassistant.helpers.event import (
|
||||
async_track_state_change_event,
|
||||
async_track_state_report_event,
|
||||
)
|
||||
from homeassistant.helpers.reload import async_setup_reload_service
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import (
|
||||
@@ -53,6 +56,7 @@ from .const import (
|
||||
CONF_UNIT,
|
||||
CONF_UNIT_PREFIX,
|
||||
CONF_UNIT_TIME,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -85,6 +89,7 @@ DEFAULT_TIME_WINDOW = 0
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_UNIQUE_ID): cv.string,
|
||||
vol.Required(CONF_SOURCE): cv.entity_id,
|
||||
vol.Optional(CONF_ROUND_DIGITS, default=DEFAULT_ROUND): vol.Coerce(int),
|
||||
vol.Optional(CONF_UNIT_PREFIX, default=None): vol.In(UNIT_PREFIXES),
|
||||
@@ -145,6 +150,8 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the derivative sensor."""
|
||||
await async_setup_reload_service(hass, DOMAIN, [Platform.SENSOR])
|
||||
|
||||
derivative = DerivativeSensor(
|
||||
hass,
|
||||
name=config.get(CONF_NAME),
|
||||
@@ -154,7 +161,7 @@ async def async_setup_platform(
|
||||
unit_of_measurement=config.get(CONF_UNIT),
|
||||
unit_prefix=config[CONF_UNIT_PREFIX],
|
||||
unit_time=config[CONF_UNIT_TIME],
|
||||
unique_id=None,
|
||||
unique_id=config.get(CONF_UNIQUE_ID),
|
||||
max_sub_interval=config.get(CONF_MAX_SUB_INTERVAL),
|
||||
)
|
||||
|
||||
@@ -286,14 +293,14 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
async def _handle_restore(self) -> None:
|
||||
restored_data = await self.async_get_last_sensor_data()
|
||||
if restored_data:
|
||||
self._attr_native_unit_of_measurement = (
|
||||
restored_data.native_unit_of_measurement
|
||||
)
|
||||
if self._attr_native_unit_of_measurement is None:
|
||||
# Only restore the unit if it's not assigned from YAML
|
||||
self._attr_native_unit_of_measurement = (
|
||||
restored_data.native_unit_of_measurement
|
||||
)
|
||||
try:
|
||||
self._attr_native_value = round(
|
||||
Decimal(restored_data.native_value), # type: ignore[arg-type]
|
||||
@@ -302,6 +309,11 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
except (InvalidOperation, TypeError):
|
||||
self._attr_native_value = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
await self._handle_restore()
|
||||
|
||||
source_state = self.hass.states.get(self._sensor_source_id)
|
||||
self._derive_and_set_attributes_from_state(source_state)
|
||||
|
||||
|
||||
1
homeassistant/components/derivative/services.yaml
Normal file
1
homeassistant/components/derivative/services.yaml
Normal file
@@ -0,0 +1 @@
|
||||
reload:
|
||||
@@ -58,5 +58,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"reload": {
|
||||
"description": "Reloads derivative sensors from the YAML-configuration.",
|
||||
"name": "[%key:common::action::reload%]"
|
||||
}
|
||||
},
|
||||
"title": "Derivative sensor"
|
||||
}
|
||||
|
||||
@@ -1,259 +0,0 @@
|
||||
"""Support for Dominos Pizza ordering."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pizzapi import Address, Customer, Order
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# The domain of your component. Should be equal to the name of your component.
|
||||
DOMAIN = "dominos"
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
|
||||
ATTR_COUNTRY = "country_code"
|
||||
ATTR_FIRST_NAME = "first_name"
|
||||
ATTR_LAST_NAME = "last_name"
|
||||
ATTR_EMAIL = "email"
|
||||
ATTR_PHONE = "phone"
|
||||
ATTR_ADDRESS = "address"
|
||||
ATTR_ORDERS = "orders"
|
||||
ATTR_SHOW_MENU = "show_menu"
|
||||
ATTR_ORDER_ENTITY = "order_entity_id"
|
||||
ATTR_ORDER_NAME = "name"
|
||||
ATTR_ORDER_CODES = "codes"
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
|
||||
MIN_TIME_BETWEEN_STORE_UPDATES = timedelta(minutes=3330)
|
||||
|
||||
_ORDERS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ORDER_NAME): cv.string,
|
||||
vol.Required(ATTR_ORDER_CODES): vol.All(cv.ensure_list, [cv.string]),
|
||||
}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_COUNTRY): cv.string,
|
||||
vol.Required(ATTR_FIRST_NAME): cv.string,
|
||||
vol.Required(ATTR_LAST_NAME): cv.string,
|
||||
vol.Required(ATTR_EMAIL): cv.string,
|
||||
vol.Required(ATTR_PHONE): cv.string,
|
||||
vol.Required(ATTR_ADDRESS): cv.string,
|
||||
vol.Optional(ATTR_SHOW_MENU): cv.boolean,
|
||||
vol.Optional(ATTR_ORDERS, default=[]): vol.All(
|
||||
cv.ensure_list, [_ORDERS_SCHEMA]
|
||||
),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up is called when Home Assistant is loading our component."""
|
||||
dominos = Dominos(hass, config)
|
||||
|
||||
component = EntityComponent[DominosOrder](_LOGGER, DOMAIN, hass)
|
||||
hass.data[DOMAIN] = {}
|
||||
entities: list[DominosOrder] = []
|
||||
conf = config[DOMAIN]
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN,
|
||||
"order",
|
||||
dominos.handle_order,
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ORDER_ENTITY): cv.entity_ids,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
if conf.get(ATTR_SHOW_MENU):
|
||||
hass.http.register_view(DominosProductListView(dominos))
|
||||
|
||||
for order_info in conf.get(ATTR_ORDERS):
|
||||
order = DominosOrder(order_info, dominos)
|
||||
entities.append(order)
|
||||
|
||||
component.add_entities(entities)
|
||||
|
||||
# Return boolean to indicate that initialization was successfully.
|
||||
return True
|
||||
|
||||
|
||||
class Dominos:
|
||||
"""Main Dominos service."""
|
||||
|
||||
def __init__(self, hass, config):
|
||||
"""Set up main service."""
|
||||
conf = config[DOMAIN]
|
||||
|
||||
self.hass = hass
|
||||
self.customer = Customer(
|
||||
conf.get(ATTR_FIRST_NAME),
|
||||
conf.get(ATTR_LAST_NAME),
|
||||
conf.get(ATTR_EMAIL),
|
||||
conf.get(ATTR_PHONE),
|
||||
conf.get(ATTR_ADDRESS),
|
||||
)
|
||||
self.address = Address(
|
||||
*self.customer.address.split(","), country=conf.get(ATTR_COUNTRY)
|
||||
)
|
||||
self.country = conf.get(ATTR_COUNTRY)
|
||||
try:
|
||||
self.closest_store = self.address.closest_store()
|
||||
except Exception: # noqa: BLE001
|
||||
self.closest_store = None
|
||||
|
||||
def handle_order(self, call: ServiceCall) -> None:
|
||||
"""Handle ordering pizza."""
|
||||
entity_ids = call.data[ATTR_ORDER_ENTITY]
|
||||
|
||||
target_orders = [
|
||||
order
|
||||
for order in self.hass.data[DOMAIN]["entities"]
|
||||
if order.entity_id in entity_ids
|
||||
]
|
||||
|
||||
for order in target_orders:
|
||||
order.place()
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_STORE_UPDATES)
|
||||
def update_closest_store(self):
|
||||
"""Update the shared closest store (if open)."""
|
||||
try:
|
||||
self.closest_store = self.address.closest_store()
|
||||
except Exception: # noqa: BLE001
|
||||
self.closest_store = None
|
||||
return False
|
||||
return True
|
||||
|
||||
def get_menu(self):
|
||||
"""Return the products from the closest stores menu."""
|
||||
self.update_closest_store()
|
||||
if self.closest_store is None:
|
||||
_LOGGER.warning("Cannot get menu. Store may be closed")
|
||||
return []
|
||||
menu = self.closest_store.get_menu()
|
||||
product_entries = []
|
||||
|
||||
for product in menu.products:
|
||||
item = {}
|
||||
if isinstance(product.menu_data["Variants"], list):
|
||||
variants = ", ".join(product.menu_data["Variants"])
|
||||
else:
|
||||
variants = product.menu_data["Variants"]
|
||||
item["name"] = product.name
|
||||
item["variants"] = variants
|
||||
product_entries.append(item)
|
||||
|
||||
return product_entries
|
||||
|
||||
|
||||
class DominosProductListView(http.HomeAssistantView):
|
||||
"""View to retrieve product list content."""
|
||||
|
||||
url = "/api/dominos"
|
||||
name = "api:dominos"
|
||||
|
||||
def __init__(self, dominos):
|
||||
"""Initialize suite view."""
|
||||
self.dominos = dominos
|
||||
|
||||
@callback
|
||||
def get(self, request):
|
||||
"""Retrieve if API is running."""
|
||||
return self.json(self.dominos.get_menu())
|
||||
|
||||
|
||||
class DominosOrder(Entity):
|
||||
"""Represents a Dominos order entity."""
|
||||
|
||||
def __init__(self, order_info, dominos):
|
||||
"""Set up the entity."""
|
||||
self._name = order_info["name"]
|
||||
self._product_codes = order_info["codes"]
|
||||
self._orderable = False
|
||||
self.dominos = dominos
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the orders name."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def product_codes(self):
|
||||
"""Return the orders product codes."""
|
||||
return self._product_codes
|
||||
|
||||
@property
|
||||
def orderable(self):
|
||||
"""Return the true if orderable."""
|
||||
return self._orderable
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state either closed, orderable or unorderable."""
|
||||
if self.dominos.closest_store is None:
|
||||
return "closed"
|
||||
return "orderable" if self._orderable else "unorderable"
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Update the order state and refreshes the store."""
|
||||
try:
|
||||
self.dominos.update_closest_store()
|
||||
except Exception: # noqa: BLE001
|
||||
self._orderable = False
|
||||
return
|
||||
|
||||
try:
|
||||
order = self.order()
|
||||
order.pay_with()
|
||||
self._orderable = True
|
||||
except Exception: # noqa: BLE001
|
||||
self._orderable = False
|
||||
|
||||
def order(self):
|
||||
"""Create the order object."""
|
||||
if self.dominos.closest_store is None:
|
||||
raise HomeAssistantError("No store available")
|
||||
|
||||
order = Order(
|
||||
self.dominos.closest_store,
|
||||
self.dominos.customer,
|
||||
self.dominos.address,
|
||||
self.dominos.country,
|
||||
)
|
||||
|
||||
for code in self._product_codes:
|
||||
order.add_item(code)
|
||||
|
||||
return order
|
||||
|
||||
def place(self):
|
||||
"""Place the order."""
|
||||
try:
|
||||
order = self.order()
|
||||
order.place()
|
||||
except Exception: # noqa: BLE001
|
||||
self._orderable = False
|
||||
_LOGGER.warning(
|
||||
"Attempted to order Dominos - Order invalid or store closed"
|
||||
)
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"order": {
|
||||
"service": "mdi:pizza"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"domain": "dominos",
|
||||
"name": "Dominos Pizza",
|
||||
"codeowners": [],
|
||||
"dependencies": ["http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/dominos",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pizzapi"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pizzapi==0.0.6"]
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
order:
|
||||
fields:
|
||||
order_entity_id:
|
||||
example: dominos.medium_pan
|
||||
selector:
|
||||
text:
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"order": {
|
||||
"description": "Places a set of orders with Domino's Pizza.",
|
||||
"fields": {
|
||||
"order_entity_id": {
|
||||
"description": "The ID (as specified in the configuration) of an order to place. If provided as an array, all the identified orders will be placed.",
|
||||
"name": "Order entity"
|
||||
}
|
||||
},
|
||||
"name": "Order"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,7 @@ from typing import Any, cast
|
||||
from aiohttp import ClientSession
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
@@ -18,13 +19,17 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_CONFIG_ENTRY
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_TXT = "txt"
|
||||
@@ -32,7 +37,13 @@ ATTR_TXT = "txt"
|
||||
DOMAIN = "duckdns"
|
||||
|
||||
INTERVAL = timedelta(minutes=5)
|
||||
|
||||
BACKOFF_INTERVALS = (
|
||||
INTERVAL,
|
||||
timedelta(minutes=1),
|
||||
timedelta(minutes=5),
|
||||
timedelta(minutes=15),
|
||||
timedelta(minutes=30),
|
||||
)
|
||||
SERVICE_SET_TXT = "set_txt"
|
||||
|
||||
UPDATE_URL = "https://www.duckdns.org/update"
|
||||
@@ -49,39 +60,112 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SERVICE_TXT_SCHEMA = vol.Schema({vol.Required(ATTR_TXT): vol.Any(None, cv.string)})
|
||||
SERVICE_TXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_CONFIG_ENTRY): ConfigEntrySelector(
|
||||
{
|
||||
"integration": DOMAIN,
|
||||
}
|
||||
),
|
||||
vol.Optional(ATTR_TXT): vol.Any(None, cv.string),
|
||||
}
|
||||
)
|
||||
|
||||
type DuckDnsConfigEntry = ConfigEntry
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Initialize the DuckDNS component."""
|
||||
domain: str = config[DOMAIN][CONF_DOMAIN]
|
||||
token: str = config[DOMAIN][CONF_ACCESS_TOKEN]
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_TXT,
|
||||
update_domain_service,
|
||||
schema=SERVICE_TXT_SCHEMA,
|
||||
)
|
||||
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Set up Duck DNS from a config entry."""
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
async def update_domain_interval(_now: datetime) -> bool:
|
||||
"""Update the DuckDNS entry."""
|
||||
return await _update_duckdns(session, domain, token)
|
||||
return await _update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
)
|
||||
|
||||
intervals = (
|
||||
INTERVAL,
|
||||
timedelta(minutes=1),
|
||||
timedelta(minutes=5),
|
||||
timedelta(minutes=15),
|
||||
timedelta(minutes=30),
|
||||
)
|
||||
async_track_time_interval_backoff(hass, update_domain_interval, intervals)
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
await _update_duckdns(session, domain, token, txt=call.data[ATTR_TXT])
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET_TXT, update_domain_service, schema=SERVICE_TXT_SCHEMA
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval_backoff(
|
||||
hass, update_domain_interval, BACKOFF_INTERVALS
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_config_entry(
|
||||
hass: HomeAssistant, entry_id: str | None = None
|
||||
) -> DuckDnsConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
|
||||
if entry_id is None:
|
||||
if not (config_entries := hass.config_entries.async_entries(DOMAIN)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
if len(config_entries) != 1:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_selected",
|
||||
)
|
||||
return config_entries[0]
|
||||
|
||||
if not (entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
|
||||
entry = get_config_entry(call.hass, call.data.get(ATTR_CONFIG_ENTRY))
|
||||
|
||||
session = async_get_clientsession(call.hass)
|
||||
|
||||
await _update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return True
|
||||
|
||||
|
||||
_SENTINEL = object()
|
||||
|
||||
|
||||
|
||||
81
homeassistant/components/duckdns/config_flow.py
Normal file
81
homeassistant/components/duckdns/config_flow.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""Config flow for the Duck DNS integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from . import _update_duckdns
|
||||
from .const import DOMAIN
|
||||
from .issue import deprecate_yaml_issue
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DOMAIN): TextSelector(
|
||||
TextSelectorConfig(type=TextSelectorType.TEXT, suffix=".duckdns.org")
|
||||
),
|
||||
vol.Required(CONF_ACCESS_TOKEN): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class DuckDnsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Duck DNS."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_DOMAIN: user_input[CONF_DOMAIN]})
|
||||
session = async_get_clientsession(self.hass)
|
||||
try:
|
||||
if not await _update_duckdns(
|
||||
session,
|
||||
user_input[CONF_DOMAIN],
|
||||
user_input[CONF_ACCESS_TOKEN],
|
||||
):
|
||||
errors["base"] = "update_failed"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=f"{user_input[CONF_DOMAIN]}.duckdns.org", data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={"url": "https://www.duckdns.org/"},
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import config from yaml."""
|
||||
|
||||
self._async_abort_entries_match({CONF_DOMAIN: import_info[CONF_DOMAIN]})
|
||||
result = await self.async_step_user(import_info)
|
||||
if errors := result.get("errors"):
|
||||
deprecate_yaml_issue(self.hass, import_success=False)
|
||||
return self.async_abort(reason=errors["base"])
|
||||
|
||||
deprecate_yaml_issue(self.hass, import_success=True)
|
||||
return result
|
||||
7
homeassistant/components/duckdns/const.py
Normal file
7
homeassistant/components/duckdns/const.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""Constants for the Duck DNS integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN = "duckdns"
|
||||
|
||||
ATTR_CONFIG_ENTRY: Final = "config_entry_id"
|
||||
40
homeassistant/components/duckdns/issue.py
Normal file
40
homeassistant/components/duckdns/issue.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""Issues for Duck DNS integration."""
|
||||
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
|
||||
"""Deprecate yaml issue."""
|
||||
if import_success:
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Duck DNS",
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_issue_error",
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_issue_error",
|
||||
translation_placeholders={
|
||||
"url": "/config/integrations/dashboard/add?domain=duckdns"
|
||||
},
|
||||
)
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"domain": "duckdns",
|
||||
"name": "Duck DNS",
|
||||
"codeowners": [],
|
||||
"codeowners": ["@tr4nt0r"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/duckdns",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "legacy"
|
||||
"iot_class": "cloud_polling"
|
||||
}
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
set_txt:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: duckdns
|
||||
txt:
|
||||
required: true
|
||||
example: "This domain name is reserved for use in documentation"
|
||||
selector:
|
||||
text:
|
||||
|
||||
@@ -1,8 +1,48 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"update_failed": "Updating Duck DNS failed"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"access_token": "Token",
|
||||
"domain": "Subdomain"
|
||||
},
|
||||
"data_description": {
|
||||
"access_token": "Your Duck DNS account token",
|
||||
"domain": "The Duck DNS subdomain to update"
|
||||
},
|
||||
"description": "Enter your Duck DNS subdomain and token below to configure dynamic DNS updates. You can find your token on the [Duck DNS]({url}) homepage after logging into your account."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"entry_not_found": {
|
||||
"message": "Duck DNS integration entry not found"
|
||||
},
|
||||
"entry_not_selected": {
|
||||
"message": "Duck DNS integration entry not selected"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml_import_issue_error": {
|
||||
"description": "Configuring Duck DNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Duck DNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
|
||||
"title": "The Duck DNS YAML configuration import failed"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_txt": {
|
||||
"description": "Sets the TXT record of your DuckDNS subdomain.",
|
||||
"description": "Sets the TXT record of your Duck DNS subdomain.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The Duck DNS integration ID.",
|
||||
"name": "Integration ID"
|
||||
},
|
||||
"txt": {
|
||||
"description": "Payload for the TXT record.",
|
||||
"name": "TXT"
|
||||
|
||||
@@ -278,11 +278,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> boo
|
||||
for keypad in elk.keypads:
|
||||
keypad.add_callback(_keypad_changed)
|
||||
|
||||
sync_success = False
|
||||
try:
|
||||
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT):
|
||||
return False
|
||||
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT).async_wait()
|
||||
sync_success = True
|
||||
except LoginFailed:
|
||||
_LOGGER.error("ElkM1 login failed for %s", conf[CONF_HOST])
|
||||
return False
|
||||
except TimeoutError as exc:
|
||||
raise ConfigEntryNotReady(f"Timed out connecting to {conf[CONF_HOST]}") from exc
|
||||
finally:
|
||||
if not sync_success:
|
||||
elk.disconnect()
|
||||
|
||||
elk_temp_unit = elk.panel.temperature_units
|
||||
if elk_temp_unit == "C":
|
||||
@@ -321,48 +328,75 @@ async def async_unload_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> bo
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_wait_for_elk_to_sync(
|
||||
elk: Elk,
|
||||
login_timeout: int,
|
||||
sync_timeout: int,
|
||||
) -> bool:
|
||||
"""Wait until the elk has finished sync. Can fail login or timeout."""
|
||||
class LoginFailed(Exception):
|
||||
"""Raised when login to ElkM1 fails."""
|
||||
|
||||
sync_event = asyncio.Event()
|
||||
login_event = asyncio.Event()
|
||||
|
||||
success = True
|
||||
class ElkSyncWaiter:
|
||||
"""Wait for ElkM1 to sync."""
|
||||
|
||||
def login_status(succeeded: bool) -> None:
|
||||
nonlocal success
|
||||
def __init__(self, elk: Elk, login_timeout: int, sync_timeout: int) -> None:
|
||||
"""Initialize the sync waiter."""
|
||||
self._elk = elk
|
||||
self._login_timeout = login_timeout
|
||||
self._sync_timeout = sync_timeout
|
||||
self._loop = asyncio.get_running_loop()
|
||||
self._sync_future: asyncio.Future[None] = self._loop.create_future()
|
||||
self._login_future: asyncio.Future[None] = self._loop.create_future()
|
||||
|
||||
success = succeeded
|
||||
@callback
|
||||
def _async_set_future_if_not_done(self, future: asyncio.Future[None]) -> None:
|
||||
"""Set the future result if not already done."""
|
||||
if not future.done():
|
||||
future.set_result(None)
|
||||
|
||||
@callback
|
||||
def _async_login_status(self, succeeded: bool) -> None:
|
||||
"""Handle login status callback."""
|
||||
if succeeded:
|
||||
_LOGGER.debug("ElkM1 login succeeded")
|
||||
login_event.set()
|
||||
self._async_set_future_if_not_done(self._login_future)
|
||||
else:
|
||||
elk.disconnect()
|
||||
_LOGGER.error("ElkM1 login failed; invalid username or password")
|
||||
login_event.set()
|
||||
sync_event.set()
|
||||
self._async_set_exception_if_not_done(self._login_future, LoginFailed)
|
||||
|
||||
def sync_complete() -> None:
|
||||
sync_event.set()
|
||||
@callback
|
||||
def _async_set_exception_if_not_done(
|
||||
self, future: asyncio.Future[None], exception: type[Exception]
|
||||
) -> None:
|
||||
"""Set an exception on the future if not already done."""
|
||||
if not future.done():
|
||||
future.set_exception(exception())
|
||||
|
||||
@callback
|
||||
def _async_sync_complete(self) -> None:
|
||||
"""Handle sync complete callback."""
|
||||
self._async_set_future_if_not_done(self._sync_future)
|
||||
|
||||
async def async_wait(self) -> None:
|
||||
"""Wait for login and sync to complete.
|
||||
|
||||
Raises LoginFailed if login fails.
|
||||
Raises TimeoutError if login or sync times out.
|
||||
"""
|
||||
self._elk.add_handler("login", self._async_login_status)
|
||||
self._elk.add_handler("sync_complete", self._async_sync_complete)
|
||||
|
||||
elk.add_handler("login", login_status)
|
||||
elk.add_handler("sync_complete", sync_complete)
|
||||
for name, event, timeout in (
|
||||
("login", login_event, login_timeout),
|
||||
("sync_complete", sync_event, sync_timeout),
|
||||
):
|
||||
_LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
|
||||
try:
|
||||
async with asyncio.timeout(timeout):
|
||||
await event.wait()
|
||||
except TimeoutError:
|
||||
_LOGGER.debug("Timed out waiting for %s event", name)
|
||||
elk.disconnect()
|
||||
raise
|
||||
_LOGGER.debug("Received %s event", name)
|
||||
for name, future, timeout in (
|
||||
("login", self._login_future, self._login_timeout),
|
||||
("sync_complete", self._sync_future, self._sync_timeout),
|
||||
):
|
||||
_LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
|
||||
handle = self._loop.call_later(
|
||||
timeout, self._async_set_exception_if_not_done, future, TimeoutError
|
||||
)
|
||||
try:
|
||||
await future
|
||||
finally:
|
||||
handle.cancel()
|
||||
|
||||
return success
|
||||
_LOGGER.debug("Received %s event", name)
|
||||
finally:
|
||||
self._elk.remove_handler("login", self._async_login_status)
|
||||
self._elk.remove_handler("sync_complete", self._async_sync_complete)
|
||||
|
||||
@@ -25,7 +25,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType, VolDictType
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.network import is_ip_address
|
||||
|
||||
from . import async_wait_for_elk_to_sync, hostname_from_url
|
||||
from . import ElkSyncWaiter, LoginFailed, hostname_from_url
|
||||
from .const import CONF_AUTO_CONFIGURE, DISCOVER_SCAN_TIMEOUT, DOMAIN, LOGIN_TIMEOUT
|
||||
from .discovery import (
|
||||
_short_mac,
|
||||
@@ -89,8 +89,9 @@ async def validate_input(data: dict[str, str], mac: str | None) -> dict[str, str
|
||||
elk.connect()
|
||||
|
||||
try:
|
||||
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT):
|
||||
raise InvalidAuth
|
||||
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT).async_wait()
|
||||
except LoginFailed as exc:
|
||||
raise InvalidAuth from exc
|
||||
finally:
|
||||
elk.disconnect()
|
||||
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/elkm1",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["elkm1_lib"],
|
||||
"requirements": ["elkm1-lib==2.2.12"]
|
||||
"requirements": ["elkm1-lib==2.2.13"]
|
||||
}
|
||||
|
||||
@@ -29,9 +29,9 @@ from homeassistant.const import (
|
||||
UnitOfVolumeFlowRate,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import template
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .config_flow import sensor_name
|
||||
from .const import CONF_ONLY_INCLUDE_FEEDID, FEED_ID, FEED_NAME, FEED_TAG
|
||||
@@ -267,7 +267,9 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
|
||||
self._attr_extra_state_attributes[ATTR_USERID] = elem["userid"]
|
||||
self._attr_extra_state_attributes[ATTR_LASTUPDATETIME] = elem["time"]
|
||||
self._attr_extra_state_attributes[ATTR_LASTUPDATETIMESTR] = (
|
||||
template.timestamp_local(float(elem["time"]))
|
||||
dt_util.as_local(
|
||||
dt_util.utc_from_timestamp(float(elem["time"]))
|
||||
).isoformat()
|
||||
)
|
||||
|
||||
self._attr_native_value = None
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==42.7.0",
|
||||
"aioesphomeapi==42.8.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -19,6 +19,9 @@
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"preview_features": {
|
||||
"winter_mode": {}
|
||||
},
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251105.0"]
|
||||
"requirements": ["home-assistant-frontend==20251105.1"]
|
||||
}
|
||||
|
||||
@@ -11,11 +11,14 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.websocket_api import ActiveConnection
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import singleton
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DATA_STORAGE: HassKey[dict[str, UserStore]] = HassKey("frontend_storage")
|
||||
DATA_SYSTEM_STORAGE: HassKey[SystemStore] = HassKey("frontend_system_storage")
|
||||
STORAGE_VERSION_USER_DATA = 1
|
||||
STORAGE_VERSION_SYSTEM_DATA = 1
|
||||
|
||||
|
||||
async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
|
||||
@@ -23,6 +26,9 @@ async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_set_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_get_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_set_system_data)
|
||||
websocket_api.async_register_command(hass, websocket_get_system_data)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_system_data)
|
||||
|
||||
|
||||
async def async_user_store(hass: HomeAssistant, user_id: str) -> UserStore:
|
||||
@@ -83,6 +89,52 @@ class _UserStore(Store[dict[str, Any]]):
|
||||
)
|
||||
|
||||
|
||||
@singleton.singleton(DATA_SYSTEM_STORAGE, async_=True)
|
||||
async def async_system_store(hass: HomeAssistant) -> SystemStore:
|
||||
"""Access the system store."""
|
||||
store = SystemStore(hass)
|
||||
await store.async_load()
|
||||
return store
|
||||
|
||||
|
||||
class SystemStore:
|
||||
"""System store for frontend data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the system store."""
|
||||
self._store: Store[dict[str, Any]] = Store(
|
||||
hass,
|
||||
STORAGE_VERSION_SYSTEM_DATA,
|
||||
"frontend.system_data",
|
||||
)
|
||||
self.data: dict[str, Any] = {}
|
||||
self.subscriptions: dict[str, list[Callable[[], None]]] = {}
|
||||
|
||||
async def async_load(self) -> None:
|
||||
"""Load the data from the store."""
|
||||
self.data = await self._store.async_load() or {}
|
||||
|
||||
async def async_set_item(self, key: str, value: Any) -> None:
|
||||
"""Set an item and save the store."""
|
||||
self.data[key] = value
|
||||
self._store.async_delay_save(lambda: self.data, 1.0)
|
||||
for cb in self.subscriptions.get(key, []):
|
||||
cb()
|
||||
|
||||
@callback
|
||||
def async_subscribe(
|
||||
self, key: str, on_update_callback: Callable[[], None]
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to store updates."""
|
||||
self.subscriptions.setdefault(key, []).append(on_update_callback)
|
||||
|
||||
def unsubscribe() -> None:
|
||||
"""Unsubscribe from the store."""
|
||||
self.subscriptions[key].remove(on_update_callback)
|
||||
|
||||
return unsubscribe
|
||||
|
||||
|
||||
def with_user_store(
|
||||
orig_func: Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any], UserStore],
|
||||
@@ -107,6 +159,28 @@ def with_user_store(
|
||||
return with_user_store_func
|
||||
|
||||
|
||||
def with_system_store(
|
||||
orig_func: Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any], SystemStore],
|
||||
Coroutine[Any, Any, None],
|
||||
],
|
||||
) -> Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None]
|
||||
]:
|
||||
"""Decorate function to provide system store."""
|
||||
|
||||
@wraps(orig_func)
|
||||
async def with_system_store_func(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Provide system store to function."""
|
||||
store = await async_system_store(hass)
|
||||
|
||||
await orig_func(hass, connection, msg, store)
|
||||
|
||||
return with_system_store_func
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/set_user_data",
|
||||
@@ -169,3 +243,65 @@ async def websocket_subscribe_user_data(
|
||||
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
|
||||
on_data_update()
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/set_system_data",
|
||||
vol.Required("key"): str,
|
||||
vol.Required("value"): vol.Any(bool, str, int, float, dict, list, None),
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_set_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle set system data command."""
|
||||
await store.async_set_item(msg["key"], msg["value"])
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{vol.Required("type"): "frontend/get_system_data", vol.Required("key"): str}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_get_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle get system data command."""
|
||||
connection.send_result(msg["id"], {"value": store.data.get(msg["key"])})
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/subscribe_system_data",
|
||||
vol.Required("key"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_subscribe_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle subscribe to system data command."""
|
||||
key: str = msg["key"]
|
||||
|
||||
def on_data_update() -> None:
|
||||
"""Handle system data update."""
|
||||
connection.send_event(msg["id"], {"value": store.data.get(key)})
|
||||
|
||||
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
|
||||
on_data_update()
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
@@ -1,4 +1,12 @@
|
||||
{
|
||||
"preview_features": {
|
||||
"winter_mode": {
|
||||
"description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️",
|
||||
"disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in labs settings.",
|
||||
"enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in labs settings.",
|
||||
"name": "Winter mode"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"mode": {
|
||||
"options": {
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/generic",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["av==13.1.0", "Pillow==12.0.0"]
|
||||
"requirements": ["av==16.0.1", "Pillow==12.0.0"]
|
||||
}
|
||||
|
||||
@@ -2,10 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from secrets import token_hex
|
||||
import shutil
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from aiohttp import BasicAuth, ClientSession, UnixConnector
|
||||
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
|
||||
from awesomeversion import AwesomeVersion
|
||||
from go2rtc_client import Go2RtcRestClient
|
||||
@@ -35,7 +37,12 @@ from homeassistant.components.camera import (
|
||||
from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN
|
||||
from homeassistant.components.stream import Orientation
|
||||
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
|
||||
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.const import (
|
||||
CONF_PASSWORD,
|
||||
CONF_URL,
|
||||
CONF_USERNAME,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
|
||||
from homeassistant.helpers import (
|
||||
@@ -43,7 +50,10 @@ from homeassistant.helpers import (
|
||||
discovery_flow,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
async_create_clientsession,
|
||||
async_get_clientsession,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.package import is_docker_env
|
||||
@@ -52,6 +62,7 @@ from .const import (
|
||||
CONF_DEBUG_UI,
|
||||
DEBUG_UI_URL_MESSAGE,
|
||||
DOMAIN,
|
||||
HA_MANAGED_UNIX_SOCKET,
|
||||
HA_MANAGED_URL,
|
||||
RECOMMENDED_VERSION,
|
||||
)
|
||||
@@ -60,49 +71,49 @@ from .server import Server
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_FFMPEG = "ffmpeg"
|
||||
_SUPPORTED_STREAMS = frozenset(
|
||||
(
|
||||
"bubble",
|
||||
"dvrip",
|
||||
"expr",
|
||||
_FFMPEG,
|
||||
"gopro",
|
||||
"homekit",
|
||||
"http",
|
||||
"https",
|
||||
"httpx",
|
||||
"isapi",
|
||||
"ivideon",
|
||||
"kasa",
|
||||
"nest",
|
||||
"onvif",
|
||||
"roborock",
|
||||
"rtmp",
|
||||
"rtmps",
|
||||
"rtmpx",
|
||||
"rtsp",
|
||||
"rtsps",
|
||||
"rtspx",
|
||||
"tapo",
|
||||
"tcp",
|
||||
"webrtc",
|
||||
"webtorrent",
|
||||
)
|
||||
)
|
||||
_AUTH = "auth"
|
||||
|
||||
|
||||
def _validate_auth(config: dict) -> dict:
|
||||
"""Validate that username and password are only set when a URL is configured or when debug UI is enabled."""
|
||||
auth_exists = CONF_USERNAME in config
|
||||
debug_ui_enabled = config.get(CONF_DEBUG_UI, False)
|
||||
|
||||
if debug_ui_enabled and not auth_exists:
|
||||
raise vol.Invalid("Username and password must be set when debug_ui is true")
|
||||
|
||||
if auth_exists and CONF_URL not in config and not debug_ui_enabled:
|
||||
raise vol.Invalid(
|
||||
"Username and password can only be set when a URL is configured or debug_ui is true"
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url,
|
||||
vol.Exclusive(CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.boolean,
|
||||
}
|
||||
DOMAIN: vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url,
|
||||
vol.Exclusive(
|
||||
CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE
|
||||
): cv.boolean,
|
||||
vol.Inclusive(CONF_USERNAME, _AUTH): vol.All(
|
||||
cv.string, vol.Length(min=1)
|
||||
),
|
||||
vol.Inclusive(CONF_PASSWORD, _AUTH): vol.All(
|
||||
cv.string, vol.Length(min=1)
|
||||
),
|
||||
}
|
||||
),
|
||||
_validate_auth,
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)

_DATA_GO2RTC: HassKey[str] = HassKey(DOMAIN)
_DATA_GO2RTC: HassKey[Go2RtcConfig] = HassKey(DOMAIN)
_RETRYABLE_ERRORS = (ClientConnectionError, ServerConnectionError)
type Go2RtcConfigEntry = ConfigEntry[WebRTCProvider]

@@ -110,12 +121,19 @@ type Go2RtcConfigEntry = ConfigEntry[WebRTCProvider]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up WebRTC."""
    url: str | None = None
    username: str | None = None
    password: str | None = None

    if DOMAIN not in config and DEFAULT_CONFIG_DOMAIN not in config:
        await _remove_go2rtc_entries(hass)
        return True

    domain_config = config.get(DOMAIN, {})
    username = domain_config.get(CONF_USERNAME)
    password = domain_config.get(CONF_PASSWORD)

    if not (configured_by_user := DOMAIN in config) or not (
        url := config[DOMAIN].get(CONF_URL)
        url := domain_config.get(CONF_URL)
    ):
        if not is_docker_env():
            if not configured_by_user:
@@ -128,9 +146,26 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            _LOGGER.error("Could not find go2rtc docker binary")
            return False

        # Generate random credentials when not provided to secure the server
        if not username or not password:
            username = token_hex()
            password = token_hex()
            _LOGGER.debug("Generated random credentials for go2rtc server")

        auth = BasicAuth(username, password)
        # HA will manage the binary
        # Manually created session (not using the helper) needs to be closed manually
        # See on_stop listener below
        session = ClientSession(
            connector=UnixConnector(path=HA_MANAGED_UNIX_SOCKET), auth=auth
        )
        server = Server(
            hass, binary, enable_ui=config.get(DOMAIN, {}).get(CONF_DEBUG_UI, False)
            hass,
            binary,
            session,
            enable_ui=domain_config.get(CONF_DEBUG_UI, False),
            username=username,
            password=password,
        )
        try:
            await server.start()
@@ -140,12 +175,19 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

        async def on_stop(event: Event) -> None:
            await server.stop()
            await session.close()

        hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, on_stop)

        url = HA_MANAGED_URL
    elif username and password:
        # Create session with BasicAuth if credentials are provided
        auth = BasicAuth(username, password)
        session = async_create_clientsession(hass, auth=auth)
    else:
        session = async_get_clientsession(hass)

    hass.data[_DATA_GO2RTC] = url
    hass.data[_DATA_GO2RTC] = Go2RtcConfig(url, session)
    discovery_flow.async_create_flow(
        hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
    )
@@ -161,8 +203,9 @@ async def _remove_go2rtc_entries(hass: HomeAssistant) -> None:
async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bool:
    """Set up go2rtc from a config entry."""

    url = hass.data[_DATA_GO2RTC]
    session = async_get_clientsession(hass)
    config = hass.data[_DATA_GO2RTC]
    url = config.url
    session = config.session
    client = Go2RtcRestClient(session, url)
    # Validate the server URL
    try:
@@ -197,6 +240,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bo
        return False

    provider = entry.runtime_data = WebRTCProvider(hass, url, session, client)
    await provider.initialize()
    entry.async_on_unload(async_register_webrtc_provider(hass, provider))
    return True

@@ -228,16 +272,21 @@ class WebRTCProvider(CameraWebRTCProvider):
        self._session = session
        self._rest_client = rest_client
        self._sessions: dict[str, Go2RtcWsClient] = {}
        self._supported_schemes: set[str] = set()

    @property
    def domain(self) -> str:
        """Return the integration domain of the provider."""
        return DOMAIN

    async def initialize(self) -> None:
        """Initialize the provider."""
        self._supported_schemes = await self._rest_client.schemes.list()

    @callback
    def async_is_supported(self, stream_source: str) -> bool:
        """Return if this provider is supports the Camera as source."""
        return stream_source.partition(":")[0] in _SUPPORTED_STREAMS
        return stream_source.partition(":")[0] in self._supported_schemes

    async def async_handle_async_webrtc_offer(
        self,
@@ -365,3 +414,11 @@ class WebRTCProvider(CameraWebRTCProvider):
        for ws_client in self._sessions.values():
            await ws_client.close()
        self._sessions.clear()


@dataclass
class Go2RtcConfig:
    """Go2rtc configuration."""

    url: str
    session: ClientSession

@@ -6,4 +6,7 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
RECOMMENDED_VERSION = "1.9.11"
HA_MANAGED_UNIX_SOCKET = "/run/go2rtc.sock"
# When changing this version, also update the corresponding SHA hash (_GO2RTC_SHA)
# in script/hassfest/docker.py.
RECOMMENDED_VERSION = "1.9.12"

@@ -8,6 +8,6 @@
  "integration_type": "system",
  "iot_class": "local_polling",
  "quality_scale": "internal",
  "requirements": ["go2rtc-client==0.2.1"],
  "requirements": ["go2rtc-client==0.3.0"],
  "single_config_entry": true
}

@@ -6,13 +6,13 @@ from contextlib import suppress
import logging
from tempfile import NamedTemporaryFile

from aiohttp import ClientSession
from go2rtc_client import Go2RtcRestClient

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import HA_MANAGED_API_PORT, HA_MANAGED_URL
from .const import HA_MANAGED_API_PORT, HA_MANAGED_UNIX_SOCKET, HA_MANAGED_URL

_LOGGER = logging.getLogger(__name__)
_TERMINATE_TIMEOUT = 5
@@ -23,14 +23,30 @@ _LOG_BUFFER_SIZE = 512
_RESPAWN_COOLDOWN = 1

# Default configuration for HA
# - Api is listening only on localhost
# - Unix socket for secure local communication
# - Basic auth enabled, including local connections
# - HTTP API only enabled when UI is enabled
# - Enable rtsp for localhost only as ffmpeg needs it
# - Clear default ice servers
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
# Do not edit it manually

app:
  modules: {app_modules}

api:
  listen: "{api_ip}:{api_port}"
  listen: "{listen_config}"
  unix_listen: "{unix_socket}"
  allow_paths: {api_allow_paths}
  local_auth: true
  username: {username}
  password: {password}

# ffmpeg needs the exec module
# Restrict execution to only ffmpeg binary
exec:
  allow_paths:
    - ffmpeg

rtsp:
  listen: "127.0.0.1:18554"
@@ -40,6 +56,43 @@ webrtc:
  ice_servers: []
"""

_APP_MODULES = (
    "api",
    "exec",  # Execution module for ffmpeg
    "ffmpeg",
    "http",
    "mjpeg",
    "onvif",
    "rtmp",
    "rtsp",
    "srtp",
    "webrtc",
    "ws",
)

_API_ALLOW_PATHS = (
    "/",  # UI static page and version control
    "/api",  # Main API path
    "/api/frame.jpeg",  # Snapshot functionality
    "/api/schemes",  # Supported stream schemes
    "/api/streams",  # Stream management
    "/api/webrtc",  # Webrtc functionality
    "/api/ws",  # Websocket functionality (e.g. webrtc candidates)
)

# Additional modules when UI is enabled
_UI_APP_MODULES = (
    *_APP_MODULES,
    "debug",
)
# Additional api paths when UI is enabled
_UI_API_ALLOW_PATHS = (
    *_API_ALLOW_PATHS,
    "/api/config",  # UI config view
    "/api/log",  # UI log view
    "/api/streams.dot",  # UI network view
)

_LOG_LEVEL_MAP = {
    "TRC": logging.DEBUG,
    "DBG": logging.DEBUG,
@@ -61,14 +114,40 @@ class Go2RTCWatchdogError(HomeAssistantError):
    """Raised on watchdog error."""


def _create_temp_file(api_ip: str) -> str:
def _format_list_for_yaml(items: tuple[str, ...]) -> str:
    """Format a list of strings for yaml config."""
    if not items:
        return "[]"
    formatted_items = ",".join(f'"{item}"' for item in items)
    return f"[{formatted_items}]"
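The helper emits a flow-style YAML list. A standalone copy (so the example runs on its own) shows the exact output:

```python
def _format_list_for_yaml(items: tuple[str, ...]) -> str:
    """Same logic as the helper above, repeated here so the example is self-contained."""
    if not items:
        return "[]"
    return "[" + ",".join(f'"{item}"' for item in items) + "]"


print(_format_list_for_yaml(("api", "exec", "webrtc")))  # ["api","exec","webrtc"]
print(_format_list_for_yaml(()))                         # []
```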


def _create_temp_file(enable_ui: bool, username: str, password: str) -> str:
    """Create temporary config file."""
    app_modules: tuple[str, ...] = _APP_MODULES
    api_paths: tuple[str, ...] = _API_ALLOW_PATHS

    if enable_ui:
        app_modules = _UI_APP_MODULES
        api_paths = _UI_API_ALLOW_PATHS
        # Listen on all interfaces for allowing access from all ips
        listen_config = f":{HA_MANAGED_API_PORT}"
    else:
        # Disable HTTP listening when UI is not enabled
        # as HA does not use it.
        listen_config = ""

    # Set delete=False to prevent the file from being deleted when the file is closed
    # Linux is clearing tmp folder on reboot, so no need to delete it manually
    with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file:
        file.write(
            _GO2RTC_CONFIG_FORMAT.format(
                api_ip=api_ip, api_port=HA_MANAGED_API_PORT
                listen_config=listen_config,
                unix_socket=HA_MANAGED_UNIX_SOCKET,
                app_modules=_format_list_for_yaml(app_modules),
                api_allow_paths=_format_list_for_yaml(api_paths),
                username=username,
                password=password,
            ).encode()
        )
    return file.name
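For orientation, the snippet below renders a trimmed copy of the template for the default case (UI disabled), standalone. It is only an illustration: the template excerpt is shortened, and the credential values are placeholders rather than real output.

```python
# Rough illustration of what _create_temp_file writes when the UI is disabled.
_TEMPLATE = """api:
  listen: "{listen_config}"
  unix_listen: "{unix_socket}"
  allow_paths: {api_allow_paths}
  local_auth: true
  username: {username}
  password: {password}
"""

print(
    _TEMPLATE.format(
        listen_config="",  # HTTP listener disabled when the UI is off
        unix_socket="/run/go2rtc.sock",
        api_allow_paths='["/","/api","/api/frame.jpeg","/api/schemes","/api/streams","/api/webrtc","/api/ws"]',
        username="<generated>",  # placeholder for the token_hex() credentials
        password="<generated>",
    )
)
```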
@@ -78,18 +157,25 @@ class Server:
    """Go2rtc server."""

    def __init__(
        self, hass: HomeAssistant, binary: str, *, enable_ui: bool = False
        self,
        hass: HomeAssistant,
        binary: str,
        session: ClientSession,
        *,
        enable_ui: bool = False,
        username: str,
        password: str,
    ) -> None:
        """Initialize the server."""
        self._hass = hass
        self._binary = binary
        self._session = session
        self._enable_ui = enable_ui
        self._username = username
        self._password = password
        self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE)
        self._process: asyncio.subprocess.Process | None = None
        self._startup_complete = asyncio.Event()
        self._api_ip = _LOCALHOST_IP
        if enable_ui:
            # Listen on all interfaces for allowing access from all ips
            self._api_ip = ""
        self._watchdog_task: asyncio.Task | None = None
        self._watchdog_tasks: list[asyncio.Task] = []

@@ -104,7 +190,7 @@ class Server:
        """Start the server."""
        _LOGGER.debug("Starting go2rtc server")
        config_file = await self._hass.async_add_executor_job(
            _create_temp_file, self._api_ip
            _create_temp_file, self._enable_ui, self._username, self._password
        )

        self._startup_complete.clear()
@@ -133,7 +219,7 @@ class Server:
            raise Go2RTCServerStartError from err

        # Check the server version
        client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL)
        client = Go2RtcRestClient(self._session, HA_MANAGED_URL)
        await client.validate_server_version()

    async def _log_output(self, process: asyncio.subprocess.Process) -> None:
@@ -205,7 +291,7 @@ class Server:

    async def _monitor_api(self) -> None:
        """Raise if the go2rtc process terminates."""
        client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL)
        client = Go2RtcRestClient(self._session, HA_MANAGED_URL)

        _LOGGER.debug("Monitoring go2rtc API")
        try:

@@ -97,7 +97,8 @@ SENSOR_DESCRIPTIONS = [
        key="duration",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.MINUTES,
        native_unit_of_measurement=UnitOfTime.SECONDS,
        suggested_unit_of_measurement=UnitOfTime.MINUTES,
    )
]

@@ -174,7 +175,7 @@ class GoogleTravelTimeSensor(SensorEntity):
        if self._route is None:
            return None

        return round(self._route.duration.seconds / 60)
        return self._route.duration.seconds

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
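In other words, the entity now reports the raw duration in seconds and leaves the minutes conversion to the sensor platform via suggested_unit_of_measurement, instead of rounding to whole minutes itself. A small arithmetic illustration (not Home Assistant code, values made up):

```python
# Illustrative only: before vs. after for a 15.5-minute route.
route_seconds = 930

old_native_value = round(route_seconds / 60)  # 16 whole minutes, precision lost
new_native_value = route_seconds              # 930 s, shown as 15.5 min after unit conversion
print(old_native_value, new_native_value)
```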

@@ -20,7 +20,7 @@ from .coordinator import (
    GoogleWeatherSubEntryRuntimeData,
)

_PLATFORMS: list[Platform] = [Platform.WEATHER]
_PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WEATHER]


async def async_setup_entry(

@@ -16,10 +16,15 @@ class GoogleWeatherBaseEntity(Entity):
    _attr_has_entity_name = True

    def __init__(
        self, config_entry: GoogleWeatherConfigEntry, subentry: ConfigSubentry
        self,
        config_entry: GoogleWeatherConfigEntry,
        subentry: ConfigSubentry,
        unique_id_suffix: str | None = None,
    ) -> None:
        """Initialize base entity."""
        self._attr_unique_id = subentry.subentry_id
        if unique_id_suffix is not None:
            self._attr_unique_id += f"_{unique_id_suffix.lower()}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, subentry.subentry_id)},
            name=subentry.title,

homeassistant/components/google_weather/icons.json (new file, 27 lines)
@@ -0,0 +1,27 @@
{
  "entity": {
    "sensor": {
      "cloud_coverage": {
        "default": "mdi:weather-cloudy"
      },
      "precipitation_probability": {
        "default": "mdi:weather-rainy"
      },
      "precipitation_qpf": {
        "default": "mdi:cup-water"
      },
      "thunderstorm_probability": {
        "default": "mdi:weather-lightning"
      },
      "uv_index": {
        "default": "mdi:weather-sunny-alert"
      },
      "visibility": {
        "default": "mdi:eye"
      },
      "weather_condition": {
        "default": "mdi:card-text-outline"
      }
    }
  }
}

homeassistant/components/google_weather/sensor.py (new file, 233 lines)
@@ -0,0 +1,233 @@
"""Support for Google Weather sensors."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from google_weather_api import CurrentConditionsResponse

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import (
    DEGREE,
    PERCENTAGE,
    UV_INDEX,
    UnitOfLength,
    UnitOfPressure,
    UnitOfSpeed,
    UnitOfTemperature,
    UnitOfVolumetricFlux,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import (
    GoogleWeatherConfigEntry,
    GoogleWeatherCurrentConditionsCoordinator,
)
from .entity import GoogleWeatherBaseEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class GoogleWeatherSensorDescription(SensorEntityDescription):
    """Class describing Google Weather sensor entities."""

    value_fn: Callable[[CurrentConditionsResponse], str | int | float | None]


SENSOR_TYPES: tuple[GoogleWeatherSensorDescription, ...] = (
    GoogleWeatherSensorDescription(
        key="temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda data: data.temperature.degrees,
    ),
    GoogleWeatherSensorDescription(
        key="feelsLikeTemperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda data: data.feels_like_temperature.degrees,
        translation_key="apparent_temperature",
    ),
    GoogleWeatherSensorDescription(
        key="dewPoint",
        device_class=SensorDeviceClass.TEMPERATURE,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda data: data.dew_point.degrees,
        translation_key="dew_point",
    ),
    GoogleWeatherSensorDescription(
        key="heatIndex",
        device_class=SensorDeviceClass.TEMPERATURE,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda data: data.heat_index.degrees,
        translation_key="heat_index",
    ),
    GoogleWeatherSensorDescription(
        key="windChill",
        device_class=SensorDeviceClass.TEMPERATURE,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        value_fn=lambda data: data.wind_chill.degrees,
        translation_key="wind_chill",
    ),
    GoogleWeatherSensorDescription(
        key="relativeHumidity",
        device_class=SensorDeviceClass.HUMIDITY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        value_fn=lambda data: data.relative_humidity,
    ),
    GoogleWeatherSensorDescription(
        key="uvIndex",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UV_INDEX,
        value_fn=lambda data: data.uv_index,
        translation_key="uv_index",
    ),
    GoogleWeatherSensorDescription(
        key="precipitation_probability",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        value_fn=lambda data: data.precipitation.probability.percent,
        translation_key="precipitation_probability",
    ),
    GoogleWeatherSensorDescription(
        key="precipitation_qpf",
        device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
        value_fn=lambda data: data.precipitation.qpf.quantity,
    ),
    GoogleWeatherSensorDescription(
        key="thunderstormProbability",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        value_fn=lambda data: data.thunderstorm_probability,
        translation_key="thunderstorm_probability",
    ),
    GoogleWeatherSensorDescription(
        key="airPressure",
        device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
        native_unit_of_measurement=UnitOfPressure.HPA,
        value_fn=lambda data: data.air_pressure.mean_sea_level_millibars,
    ),
    GoogleWeatherSensorDescription(
        key="wind_direction",
        device_class=SensorDeviceClass.WIND_DIRECTION,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
        native_unit_of_measurement=DEGREE,
        value_fn=lambda data: data.wind.direction.degrees,
    ),
    GoogleWeatherSensorDescription(
        key="wind_speed",
        device_class=SensorDeviceClass.WIND_SPEED,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
        value_fn=lambda data: data.wind.speed.value,
    ),
    GoogleWeatherSensorDescription(
        key="wind_gust",
        device_class=SensorDeviceClass.WIND_SPEED,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
        value_fn=lambda data: data.wind.gust.value,
        translation_key="wind_gust_speed",
    ),
    GoogleWeatherSensorDescription(
        key="visibility",
        device_class=SensorDeviceClass.DISTANCE,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfLength.KILOMETERS,
        value_fn=lambda data: data.visibility.distance,
        translation_key="visibility",
    ),
    GoogleWeatherSensorDescription(
        key="cloudCover",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        value_fn=lambda data: data.cloud_cover,
        translation_key="cloud_coverage",
    ),
    GoogleWeatherSensorDescription(
        key="weatherCondition",
        entity_registry_enabled_default=False,
        value_fn=lambda data: data.weather_condition.description.text,
        translation_key="weather_condition",
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: GoogleWeatherConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Add Google Weather entities from a config_entry."""
    for subentry in entry.subentries.values():
        subentry_runtime_data = entry.runtime_data.subentries_runtime_data[
            subentry.subentry_id
        ]
        coordinator = subentry_runtime_data.coordinator_observation
        async_add_entities(
            (
                GoogleWeatherSensor(coordinator, subentry, description)
                for description in SENSOR_TYPES
                if description.value_fn(coordinator.data) is not None
            ),
            config_subentry_id=subentry.subentry_id,
        )


class GoogleWeatherSensor(
    CoordinatorEntity[GoogleWeatherCurrentConditionsCoordinator],
    GoogleWeatherBaseEntity,
    SensorEntity,
):
    """Define a Google Weather entity."""

    entity_description: GoogleWeatherSensorDescription

    def __init__(
        self,
        coordinator: GoogleWeatherCurrentConditionsCoordinator,
        subentry: ConfigSubentry,
        description: GoogleWeatherSensorDescription,
    ) -> None:
        """Initialize."""
        super().__init__(coordinator)
        GoogleWeatherBaseEntity.__init__(
            self, coordinator.config_entry, subentry, description.key
        )
        self.entity_description = description

    @property
    def native_value(self) -> str | int | float | None:
        """Return the state."""
        return self.entity_description.value_fn(self.coordinator.data)
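Two design points in this new platform are worth spelling out: entities are only created for fields the API actually reported a value for at setup time (the `value_fn(coordinator.data) is not None` filter), and each entity resolves its state through the shared `value_fn` on its description. The standalone sketch below mirrors that pattern with a dummy data object instead of `CurrentConditionsResponse`; all names in it are illustrative only.

```python
# Standalone sketch of the value_fn / setup-filter pattern used above.
from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class DummyConditions:
    temperature_c: float | None
    uv_index: int | None


@dataclass(frozen=True)
class DummyDescription:
    key: str
    value_fn: Callable[[DummyConditions], float | int | None]


DESCRIPTIONS = (
    DummyDescription("temperature", lambda d: d.temperature_c),
    DummyDescription("uv_index", lambda d: d.uv_index),
)

data = DummyConditions(temperature_c=21.5, uv_index=None)

# Mirror of the setup filter: only create entities for values the API reported.
created = [d.key for d in DESCRIPTIONS if d.value_fn(data) is not None]
print(created)  # ['temperature']
```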
@@ -61,5 +61,42 @@
        }
      }
    }
  },
  "entity": {
    "sensor": {
      "apparent_temperature": {
        "name": "Apparent temperature"
      },
      "cloud_coverage": {
        "name": "Cloud coverage"
      },
      "dew_point": {
        "name": "Dew point"
      },
      "heat_index": {
        "name": "Heat index temperature"
      },
      "precipitation_probability": {
        "name": "Precipitation probability"
      },
      "thunderstorm_probability": {
        "name": "Thunderstorm probability"
      },
      "uv_index": {
        "name": "UV index"
      },
      "visibility": {
        "name": "Visibility"
      },
      "weather_condition": {
        "name": "Weather condition"
      },
      "wind_chill": {
        "name": "Wind chill temperature"
      },
      "wind_gust_speed": {
        "name": "Wind gust speed"
      }
    }
  }
}

homeassistant/components/hanna/__init__.py (new file, 54 lines)
@@ -0,0 +1,54 @@
"""The Hanna Instruments integration."""

from __future__ import annotations

from typing import Any

from hanna_cloud import HannaCloudClient

from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant

from .coordinator import HannaConfigEntry, HannaDataCoordinator

PLATFORMS = [Platform.SENSOR]


def _authenticate_and_get_devices(
    api_client: HannaCloudClient,
    email: str,
    password: str,
) -> list[dict[str, Any]]:
    """Authenticate and get devices in a single executor job."""
    api_client.authenticate(email, password)
    return api_client.get_devices()


async def async_setup_entry(hass: HomeAssistant, entry: HannaConfigEntry) -> bool:
    """Set up Hanna Instruments from a config entry."""
    api_client = HannaCloudClient()
    devices = await hass.async_add_executor_job(
        _authenticate_and_get_devices,
        api_client,
        entry.data[CONF_EMAIL],
        entry.data[CONF_PASSWORD],
    )

    # Create device coordinators
    device_coordinators = {}
    for device in devices:
        coordinator = HannaDataCoordinator(hass, entry, device, api_client)
        await coordinator.async_config_entry_first_refresh()
        device_coordinators[coordinator.device_identifier] = coordinator

    # Set runtime data
    entry.runtime_data = device_coordinators

    # Forward the setup to the platforms
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: HannaConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

homeassistant/components/hanna/config_flow.py (new file, 62 lines)
@@ -0,0 +1,62 @@
"""Config flow for Hanna Instruments integration."""

from __future__ import annotations

import logging
from typing import Any

from hanna_cloud import AuthenticationError, HannaCloudClient
from requests.exceptions import ConnectionError as RequestsConnectionError, Timeout
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


class HannaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Hanna Instruments."""

    VERSION = 1
    data_schema = vol.Schema(
        {vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str}
    )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the setup flow."""

        errors: dict[str, str] = {}

        if user_input is not None:
            await self.async_set_unique_id(user_input[CONF_EMAIL])
            self._abort_if_unique_id_configured()
            client = HannaCloudClient()
            try:
                await self.hass.async_add_executor_job(
                    client.authenticate,
                    user_input[CONF_EMAIL],
                    user_input[CONF_PASSWORD],
                )
            except (Timeout, RequestsConnectionError):
                errors["base"] = "cannot_connect"
            except AuthenticationError:
                errors["base"] = "invalid_auth"

            if not errors:
                return self.async_create_entry(
                    title=user_input[CONF_EMAIL],
                    data=user_input,
                )

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                self.data_schema, user_input
            ),
            errors=errors,
        )

homeassistant/components/hanna/const.py (new file, 3 lines)
@@ -0,0 +1,3 @@
"""Constants for the Hanna integration."""

DOMAIN = "hanna"

homeassistant/components/hanna/coordinator.py (new file, 72 lines)
@@ -0,0 +1,72 @@
"""Hanna Instruments data coordinator for Home Assistant.

This module provides the data coordinator for fetching and managing Hanna Instruments
sensor data.
"""

from datetime import timedelta
import logging
from typing import Any

from hanna_cloud import HannaCloudClient
from requests.exceptions import RequestException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

type HannaConfigEntry = ConfigEntry[dict[str, HannaDataCoordinator]]

_LOGGER = logging.getLogger(__name__)


class HannaDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Coordinator for fetching Hanna sensor data."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: HannaConfigEntry,
        device: dict[str, Any],
        api_client: HannaCloudClient,
    ) -> None:
        """Initialize the Hanna data coordinator."""
        self.api_client = api_client
        self.device_data = device
        super().__init__(
            hass,
            _LOGGER,
            name=f"{DOMAIN}_{self.device_identifier}",
            config_entry=config_entry,
            update_interval=timedelta(seconds=30),
        )

    @property
    def device_identifier(self) -> str:
        """Return the device identifier."""
        return self.device_data["DID"]

    def get_parameters(self) -> list[dict[str, Any]]:
        """Get all parameters from the sensor data."""
        return self.api_client.parameters

    def get_parameter_value(self, key: str) -> Any:
        """Get the value for a specific parameter."""
        for parameter in self.get_parameters():
            if parameter["name"] == key:
                return parameter["value"]
        return None

    async def _async_update_data(self) -> dict[str, Any]:
        """Fetch latest sensor data from the Hanna API."""
        try:
            readings = await self.hass.async_add_executor_job(
                self.api_client.get_last_device_reading, self.device_identifier
            )
        except RequestException as e:
            raise UpdateFailed(f"Error communicating with Hanna API: {e}") from e
        except (KeyError, IndexError) as e:
            raise UpdateFailed(f"Error parsing Hanna API response: {e}") from e
        return readings
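The coordinator exposes readings as a list of parameter dicts and entities look values up by name. The snippet below sketches that lookup with a hypothetical payload; only the "name"/"value" keys (and the "ph"/"temp" keys used by the sensor platform) come from the code, the numbers are made up.

```python
# Hypothetical example of the parameter lookup used by get_parameter_value.
parameters = [
    {"name": "ph", "value": 7.2},
    {"name": "temp", "value": 27.4},
]


def get_parameter_value(key: str):
    """Return the value for a parameter name, or None when it is absent."""
    for parameter in parameters:
        if parameter["name"] == key:
            return parameter["value"]
    return None


print(get_parameter_value("ph"))   # 7.2
print(get_parameter_value("orp"))  # None -> the sensor reports an unknown state
```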

homeassistant/components/hanna/entity.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Hanna Instruments entity base class for Home Assistant.

This module provides the base entity class for Hanna Instruments entities.
"""

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import HannaDataCoordinator


class HannaEntity(CoordinatorEntity[HannaDataCoordinator]):
    """Base class for Hanna entities."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: HannaDataCoordinator) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, coordinator.device_identifier)},
            manufacturer=coordinator.device_data.get("manufacturer"),
            model=coordinator.device_data.get("DM"),
            name=coordinator.device_data.get("name"),
            serial_number=coordinator.device_data.get("serial_number"),
            sw_version=coordinator.device_data.get("sw_version"),
        )

homeassistant/components/hanna/manifest.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "domain": "hanna",
  "name": "Hanna",
  "codeowners": ["@bestycame"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/hanna",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["hanna-cloud==0.0.6"]
}

homeassistant/components/hanna/quality_scale.yaml (new file, 70 lines)
@@ -0,0 +1,70 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration doesn't add actions.
  appropriate-polling:
    status: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions: done
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration does not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: todo
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      This integration does not have any configuration parameters.
  docs-installation-parameters: done
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info: todo
  discovery: todo
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: todo
  entity-device-class: done
  entity-disabled-by-default: todo
  entity-translations: done
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo

  # Platinum
  async-dependency: todo
  inject-websession: todo
  strict-typing: todo

homeassistant/components/hanna/sensor.py (new file, 106 lines)
@@ -0,0 +1,106 @@
"""Hanna Instruments sensor integration for Home Assistant.

This module provides sensor entities for various Hanna Instruments devices,
including pH, ORP, temperature, and chemical sensors. It uses the Hanna API
to fetch readings and updates them periodically.
"""

from __future__ import annotations

import logging

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfElectricPotential, UnitOfTemperature, UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .coordinator import HannaConfigEntry, HannaDataCoordinator
from .entity import HannaEntity

_LOGGER = logging.getLogger(__name__)

SENSOR_DESCRIPTIONS = [
    SensorEntityDescription(
        key="ph",
        translation_key="ph_value",
        device_class=SensorDeviceClass.PH,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        key="orp",
        translation_key="chlorine_orp_value",
        device_class=SensorDeviceClass.VOLTAGE,
        native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        key="temp",
        translation_key="water_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        key="airTemp",
        translation_key="air_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        key="acidBase",
        translation_key="ph_acid_base_flow_rate",
        icon="mdi:chemical-weapon",
        device_class=SensorDeviceClass.VOLUME,
        native_unit_of_measurement=UnitOfVolume.MILLILITERS,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        key="cl",
        translation_key="chlorine_flow_rate",
        icon="mdi:chemical-weapon",
        device_class=SensorDeviceClass.VOLUME,
        native_unit_of_measurement=UnitOfVolume.MILLILITERS,
        state_class=SensorStateClass.MEASUREMENT,
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: HannaConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Hanna sensors from a config entry."""
    device_coordinators = entry.runtime_data

    async_add_entities(
        HannaSensor(coordinator, description)
        for description in SENSOR_DESCRIPTIONS
        for coordinator in device_coordinators.values()
    )


class HannaSensor(HannaEntity, SensorEntity):
    """Representation of a Hanna sensor."""

    def __init__(
        self,
        coordinator: HannaDataCoordinator,
        description: SensorEntityDescription,
    ) -> None:
        """Initialize a Hanna sensor."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{coordinator.device_identifier}_{description.key}"
        self.entity_description = description

    @property
    def native_value(self) -> StateType:
        """Return the value reported by the sensor."""
        return self.coordinator.get_parameter_value(self.entity_description.key)
Some files were not shown because too many files have changed in this diff.