Mirror of https://github.com/home-assistant/core.git, synced 2025-09-23 11:59:37 +00:00.

Compare commits: async_curr...handle-tim (510 commits; branch names truncated in this view)
[Commit list omitted: the compare view's table of 510 commits rendered with empty author, date, and message columns; only abbreviated commit SHAs (30d27f9836 through 33ac13185a) survived extraction.]
.claude/agents/quality-scale-rule-verifier.md: new file, 77 lines

@@ -0,0 +1,77 @@
---
name: quality-scale-rule-verifier
description: |
  Use this agent when you need to verify that a Home Assistant integration follows a specific quality scale rule. This includes checking if the integration implements required patterns, configurations, or code structures defined by the quality scale system.

  <example>
  Context: The user wants to verify if an integration follows a specific quality scale rule.
  user: "Check if the peblar integration follows the config-flow rule"
  assistant: "I'll use the quality scale rule verifier to check if the peblar integration properly implements the config-flow rule."
  <commentary>
  Since the user is asking to verify a quality scale rule implementation, use the quality-scale-rule-verifier agent.
  </commentary>
  </example>

  <example>
  Context: The user is reviewing if an integration reaches a specific quality scale level.
  user: "Verify that this integration reaches the bronze quality scale"
  assistant: "Let me use the quality scale rule verifier to check the bronze quality scale implementation."
  <commentary>
  The user wants to verify the integration has reached a certain quality level, so use multiple quality-scale-rule-verifier agents to verify each bronze rule.
  </commentary>
  </example>
model: inherit
color: yellow
tools: Read, Bash, Grep, Glob, WebFetch
---

You are an expert Home Assistant integration quality scale auditor specializing in verifying compliance with specific quality scale rules. You have deep knowledge of Home Assistant's architecture, best practices, and the quality scale system that ensures integration consistency and reliability.

You will verify if an integration follows a specific quality scale rule by:

1. **Fetching Rule Documentation**: Retrieve the official rule documentation from
   `https://raw.githubusercontent.com/home-assistant/developers.home-assistant/refs/heads/master/docs/core/integration-quality-scale/rules/{rule_name}.md`
   where `{rule_name}` is the rule identifier (e.g., 'config-flow', 'entity-unique-id', 'parallel-updates').

2. **Understanding Rule Requirements**: Parse the rule documentation to identify:
   - Core requirements and mandatory implementations
   - Specific code patterns or configurations required
   - Common violations and anti-patterns
   - Exemption criteria (when a rule might not apply)
   - The quality tier this rule belongs to (Bronze, Silver, Gold, Platinum)

3. **Analyzing Integration Code**: Examine the integration's codebase at `homeassistant/components/<integration domain>`, focusing on:
   - `manifest.json` for quality scale declaration and configuration
   - `quality_scale.yaml` for rule status (done, todo, exempt)
   - Relevant Python modules based on the rule requirements
   - Configuration files and service definitions as needed

4. **Verification Process**:
   - Check if the rule is marked as 'done', 'todo', or 'exempt' in quality_scale.yaml
   - If marked 'exempt', verify the exemption reason is valid
   - If marked 'done', verify the actual implementation matches requirements
   - Identify specific files and code sections that demonstrate compliance or violations
   - Consider the integration's declared quality tier when applying rules
   - To fetch the integration docs, use WebFetch to fetch from `https://raw.githubusercontent.com/home-assistant/home-assistant.io/refs/heads/current/source/_integrations/<integration domain>.markdown`
   - To fetch information about a PyPI package, use the URL `https://pypi.org/pypi/<package>/json`

5. **Reporting Findings**: Provide a comprehensive verification report that includes:
   - **Rule Summary**: Brief description of what the rule requires
   - **Compliance Status**: Clear pass/fail/exempt determination
   - **Evidence**: Specific code examples showing compliance or violations
   - **Issues Found**: Detailed list of any non-compliance issues with file locations
   - **Recommendations**: Actionable steps to achieve compliance if needed
   - **Exemption Analysis**: If applicable, whether the exemption is justified

When examining code, you will:
- Look for exact implementation patterns specified in the rule
- Verify all required components are present and properly configured
- Check for common mistakes and anti-patterns
- Consider edge cases and error handling requirements
- Validate that implementations follow Home Assistant conventions

You will be thorough but focused, examining only the aspects relevant to the specific rule being verified. You will provide clear, actionable feedback that helps developers understand both what needs to be fixed and why it matters for integration quality.

If you cannot access the rule documentation or find the integration code, clearly state what information is missing and what you would need to complete the verification.

Remember that quality scale rules are cumulative - Bronze rules apply to all integrations with a quality scale, Silver rules apply to Silver+ integrations, and so on. Always consider the integration's target quality level when determining which rules should be enforced.
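Every external lookup this agent file describes (rule docs, integration docs, PyPI metadata) is a plain HTTP GET against a raw URL, so steps 1 and 4 can be reproduced from any shell. A minimal sketch of those lookups, outside the agent's WebFetch tool; the rule name 'config-flow' and the package 'aiohttp' are illustrative examples, and 'peblar' comes from the example dialog above:

# Rule documentation for one quality scale rule:
curl -fsSL "https://raw.githubusercontent.com/home-assistant/developers.home-assistant/refs/heads/master/docs/core/integration-quality-scale/rules/config-flow.md"

# Published docs for an integration domain:
curl -fsSL "https://raw.githubusercontent.com/home-assistant/home-assistant.io/refs/heads/current/source/_integrations/peblar.markdown"

# PyPI metadata for a requirement; the latest release sits under info.version:
curl -fsSL "https://pypi.org/pypi/aiohttp/json" | python3 -c 'import json,sys; print(json.load(sys.stdin)["info"]["version"])'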
.github/PULL_REQUEST_TEMPLATE.md (vendored): 5 changed lines

@@ -55,8 +55,12 @@
 creating the PR. If you're unsure about any of them, don't hesitate to ask.
 We're here to help! This is simply a reminder of what we are going to look
 for before merging your code.
+
+AI tools are welcome, but contributors are responsible for *fully*
+understanding the code before submitting a PR.
 -->

+- [ ] I understand the code I am submitting and can explain how it works.
 - [ ] The code change is tested and works locally.
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.

@@ -64,6 +68,7 @@
 - [ ] I have followed the [perfect PR recommendations][perfect-pr]
 - [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
 - [ ] Tests have been added to verify that the new code works.
+- [ ] Any generated code has been carefully reviewed for correctness and compliance with project standards.

 If user exposed functionality or configuration variables are added/changed:
.github/workflows/builder.yml (vendored): 46 changed lines

@@ -27,12 +27,12 @@ jobs:
 publish: ${{ steps.version.outputs.publish }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
   with:
     fetch-depth: 0

 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@
 run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

 - name: Upload translations
-  uses: actions/upload-artifact@v4.6.2
+  uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
   with:
     name: translations
     path: translations.tar.gz

@@ -90,11 +90,11 @@
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

 - name: Download nightly wheels of frontend
   if: needs.init.outputs.channel == 'dev'
-  uses: dawidd6/action-download-artifact@v11
+  uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
   with:
     github_token: ${{secrets.GITHUB_TOKEN}}
     repo: home-assistant/frontend

@@ -105,7 +105,7 @@
 - name: Download nightly wheels of intents
   if: needs.init.outputs.channel == 'dev'
-  uses: dawidd6/action-download-artifact@v11
+  uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
   with:
     github_token: ${{secrets.GITHUB_TOKEN}}
     repo: OHF-Voice/intents-package

@@ -116,7 +116,7 @@
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   if: needs.init.outputs.channel == 'dev'
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@
 sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

 - name: Download translations
-  uses: actions/download-artifact@v5.0.0
+  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
   with:
     name: translations

@@ -190,14 +190,15 @@
 echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

 - name: Login to GitHub Container Registry
-  uses: docker/login-action@v3.5.0
+  uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}
     password: ${{ secrets.GITHUB_TOKEN }}

+# home-assistant/builder doesn't support sha pinning
 - name: Build base image
-  uses: home-assistant/builder@2025.03.0
+  uses: home-assistant/builder@2025.09.0
   with:
     args: |
       $BUILD_ARGS \

@@ -242,7 +243,7 @@
 - green
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

 - name: Set build additional args
   run: |

@@ -256,14 +257,15 @@
 fi

 - name: Login to GitHub Container Registry
-  uses: docker/login-action@v3.5.0
+  uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}
     password: ${{ secrets.GITHUB_TOKEN }}

+# home-assistant/builder doesn't support sha pinning
 - name: Build base image
-  uses: home-assistant/builder@2025.03.0
+  uses: home-assistant/builder@2025.09.0
   with:
     args: |
       $BUILD_ARGS \

@@ -279,7 +281,7 @@
 runs-on: ubuntu-latest
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

 - name: Initialize git
   uses: home-assistant/actions/helpers/git-init@master

@@ -321,23 +323,23 @@
 registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

 - name: Install Cosign
-  uses: sigstore/cosign-installer@v3.9.2
+  uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
   with:
     cosign-release: "v2.2.3"

 - name: Login to DockerHub
   if: matrix.registry == 'docker.io/homeassistant'
-  uses: docker/login-action@v3.5.0
+  uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
   with:
     username: ${{ secrets.DOCKERHUB_USERNAME }}
     password: ${{ secrets.DOCKERHUB_TOKEN }}

 - name: Login to GitHub Container Registry
   if: matrix.registry == 'ghcr.io/home-assistant'
-  uses: docker/login-action@v3.5.0
+  uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
   with:
     registry: ghcr.io
     username: ${{ github.repository_owner }}

@@ -454,15 +456,15 @@
 if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}

 - name: Download translations
-  uses: actions/download-artifact@v5.0.0
+  uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
   with:
     name: translations

@@ -480,7 +482,7 @@
 python -m build

 - name: Upload package to PyPI
-  uses: pypa/gh-action-pypi-publish@v1.13.0
+  uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
   with:
     skip-existing: true
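The pattern across this whole file: each third-party action moves from a mutable tag (actions/checkout@v5.0.0) to a full commit SHA, with the old tag kept as a trailing comment for readability, so re-pointing the tag upstream can no longer change what the workflow executes; home-assistant/builder is the documented exception. If you want to verify or produce such a pin yourself, the SHA behind a tag can be read with git ls-remote. A sketch, using one of the actions above as the example:

# Ask the remote which object the release tag points at:
git ls-remote https://github.com/actions/checkout refs/tags/v5.0.0
# A lightweight tag prints the commit SHA directly; an annotated tag also prints
# a peeled "refs/tags/v5.0.0^{}" line, and that second SHA is the commit to pin:
#   uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0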
.github/workflows/ci.yaml (vendored): 307 changed lines

@@ -37,7 +37,7 @@ on:
 type: boolean

 env:
-  CACHE_VERSION: 7
+  CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
   HA_SHORT_VERSION: "2025.10"

@@ -61,6 +61,9 @@ env:
   POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit
   UV_CACHE_DIR: /tmp/uv-cache
+  APT_CACHE_BASE: /home/runner/work/apt
+  APT_CACHE_DIR: /home/runner/work/apt/cache
+  APT_LIST_CACHE_DIR: /home/runner/work/apt/lists
   SQLALCHEMY_WARN_20: 1
   PYTHONASYNCIODEBUG: 1
   HASS_CI: 1

@@ -78,6 +81,7 @@ jobs:
 core: ${{ steps.core.outputs.changes }}
 integrations_glob: ${{ steps.info.outputs.integrations_glob }}
 integrations: ${{ steps.integrations.outputs.changes }}
+apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
 pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
 python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
 requirements: ${{ steps.core.outputs.requirements }}

@@ -94,7 +98,7 @@ jobs:
 runs-on: ubuntu-24.04
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Generate partial Python venv restore key
   id: generate_python_cache_key
   run: |

@@ -111,8 +115,12 @@ jobs:
   run: >-
     echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
     hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
+- name: Generate partial apt restore key
+  id: generate_apt_cache_key
+  run: |
+    echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
 - name: Filter for core changes
-  uses: dorny/paths-filter@v3.0.2
+  uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
   id: core
   with:
     filters: .core_files.yaml

@@ -127,7 +135,7 @@
     echo "Result:"
     cat .integration_paths.yaml
 - name: Filter for integration changes
-  uses: dorny/paths-filter@v3.0.2
+  uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
   id: integrations
   with:
     filters: .integration_paths.yaml

@@ -246,16 +254,16 @@
 - info
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache@v4.2.4
+  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     key: >-

@@ -271,7 +279,7 @@
     uv pip install "$(cat requirements_test.txt | grep pre-commit)"
 - name: Restore pre-commit environment from cache
   id: cache-precommit
-  uses: actions/cache@v4.2.4
+  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: ${{ env.PRE_COMMIT_CACHE }}
     lookup-only: true

@@ -292,16 +300,16 @@
 - pre-commit
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   id: python
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -310,7 +318,7 @@
     needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
   id: cache-precommit
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: ${{ env.PRE_COMMIT_CACHE }}
     fail-on-cache-miss: true

@@ -332,16 +340,16 @@
 - pre-commit
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   id: python
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -350,7 +358,7 @@
     needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
   id: cache-precommit
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: ${{ env.PRE_COMMIT_CACHE }}
     fail-on-cache-miss: true

@@ -372,16 +380,16 @@
 - pre-commit
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   id: python
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -390,7 +398,7 @@
     needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
   id: cache-precommit
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: ${{ env.PRE_COMMIT_CACHE }}
     fail-on-cache-miss: true

@@ -462,7 +470,7 @@
 - script/hassfest/docker/Dockerfile
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Register hadolint problem matcher
   run: |
     echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -481,10 +489,10 @@
 python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ matrix.python-version }}
     check-latest: true

@@ -497,7 +505,7 @@
     env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache@v4.2.4
+  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     key: >-

@@ -505,7 +513,7 @@
     needs.info.outputs.python_cache_key }}
 - name: Restore uv wheel cache
   if: steps.cache-venv.outputs.cache-hit != 'true'
-  uses: actions/cache@v4.2.4
+  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: ${{ env.UV_CACHE_DIR }}
     key: >-

@@ -515,16 +523,38 @@
     ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
     env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
     env.HA_SHORT_VERSION }}-
+- name: Check if apt cache exists
+  id: cache-apt-check
+  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+  with:
+    lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
+    path: |
+      ${{ env.APT_CACHE_DIR }}
+      ${{ env.APT_LIST_CACHE_DIR }}
+    key: >-
+      ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
 - name: Install additional OS dependencies
-  if: steps.cache-venv.outputs.cache-hit != 'true'
+  if: |
+    steps.cache-venv.outputs.cache-hit != 'true'
+    || steps.cache-apt-check.outputs.cache-hit != 'true'
   timeout-minutes: 10
   run: |
     sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-    sudo apt-get update
+    if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
+      mkdir -p ${{ env.APT_CACHE_DIR }}
+      mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
+    fi
+
+    sudo apt-get update \
+      -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+      -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
     sudo apt-get -y install \
+      -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+      -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
       bluez \
       ffmpeg \
       libturbojpeg \
+      libxml2-utils \
       libavcodec-dev \
       libavdevice-dev \
       libavfilter-dev \

@@ -534,6 +564,19 @@
       libswresample-dev \
       libswscale-dev \
       libudev-dev
+
+    if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
+      sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
+    fi
+- name: Save apt cache
+  if: steps.cache-apt-check.outputs.cache-hit != 'true'
+  uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+  with:
+    path: |
+      ${{ env.APT_CACHE_DIR }}
+      ${{ env.APT_LIST_CACHE_DIR }}
+    key: >-
+      ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
 - name: Create Python virtual environment
   if: steps.cache-venv.outputs.cache-hit != 'true'
   run: |

@@ -553,7 +596,7 @@
     python --version
     uv pip freeze >> pip_freeze.txt
 - name: Upload pip_freeze artifact
-  uses: actions/upload-artifact@v4.6.2
+  uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
   with:
     name: pip-freeze-${{ matrix.python-version }}
     path: pip_freeze.txt
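Stripped of the workflow plumbing, the apt caching introduced above relies on two apt options: Dir::Cache relocates the downloaded package archives and Dir::State::Lists relocates the fetched index lists, so both live under a path that actions/cache can save and restore between runs. A standalone sketch of the pattern on an Ubuntu runner; the directories mirror the new env vars, and libturbojpeg stands in for the real package list:

# One-time setup when the cache is cold: the directories must exist.
APT_CACHE_DIR=/home/runner/work/apt/cache
APT_LIST_CACHE_DIR=/home/runner/work/apt/lists
mkdir -p "$APT_CACHE_DIR" "$APT_LIST_CACHE_DIR"

# Fetch package lists into the cacheable location instead of /var/lib/apt/lists:
sudo apt-get update \
  -o Dir::Cache="$APT_CACHE_DIR" \
  -o Dir::State::Lists="$APT_LIST_CACHE_DIR"

# Install with the same redirected cache, so downloaded .debs land in it too:
sudo apt-get -y install \
  -o Dir::Cache="$APT_CACHE_DIR" \
  -o Dir::State::Lists="$APT_LIST_CACHE_DIR" \
  libturbojpeg

# The workflow then chmods the tree so the runner user can read it back for
# upload, and saves it keyed on "$(lsb_release -rs)-apt-CACHE_VERSION-HA_SHORT_VERSION",
# e.g. 24.04-apt-8-2025.10 on the ubuntu-24.04 runner.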
@@ -578,24 +621,37 @@
 - info
 - base
 steps:
+- name: Restore apt cache
+  uses: actions/cache/restore@v4.2.4
+  with:
+    path: |
+      ${{ env.APT_CACHE_DIR }}
+      ${{ env.APT_LIST_CACHE_DIR }}
+    fail-on-cache-miss: true
+    key: >-
+      ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
 - name: Install additional OS dependencies
   timeout-minutes: 10
   run: |
     sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-    sudo apt-get update
+    sudo apt-get update \
+      -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+      -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
     sudo apt-get -y install \
+      -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+      -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
       libturbojpeg
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -619,16 +675,16 @@
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -653,9 +709,9 @@
 && github.event_name == 'pull_request'
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Dependency review
-  uses: actions/dependency-review-action@v4.7.3
+  uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
   with:
     license-check: false # We use our own license audit checks

@@ -676,16 +732,16 @@
 python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ matrix.python-version }}
     check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -697,7 +753,7 @@
     . venv/bin/activate
     python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
 - name: Upload licenses
-  uses: actions/upload-artifact@v4.6.2
+  uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
   with:
     name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
     path: licenses-${{ matrix.python-version }}.json

@@ -719,16 +775,16 @@
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -766,16 +822,16 @@
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -811,10 +867,10 @@
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true

@@ -827,7 +883,7 @@
     env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -835,7 +891,7 @@
     ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
     needs.info.outputs.python_cache_key }}
 - name: Restore mypy cache
-  uses: actions/cache@v4.2.4
+  uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: .mypy_cache
     key: >-

@@ -878,27 +934,40 @@
 - mypy
 name: Split tests for full run
 steps:
+- name: Restore apt cache
+  uses: actions/cache/restore@v4.2.4
+  with:
+    path: |
+      ${{ env.APT_CACHE_DIR }}
+      ${{ env.APT_LIST_CACHE_DIR }}
+    fail-on-cache-miss: true
+    key: >-
+      ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
 - name: Install additional OS dependencies
   timeout-minutes: 10
   run: |
     sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-    sudo apt-get update
+    sudo apt-get update \
+      -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+      -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
     sudo apt-get -y install \
+      -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+      -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
      bluez \
      ffmpeg \
      libturbojpeg \
      libgammu-dev
 - name: Check out code from GitHub
-  uses: actions/checkout@v5.0.0
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
   id: python
-  uses: actions/setup-python@v6.0.0
+  uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
   with:
     python-version: ${{ env.DEFAULT_PYTHON }}
     check-latest: true
 - name: Restore base Python virtual environment
   id: cache-venv
-  uses: actions/cache/restore@v4.2.4
+  uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
   with:
     path: venv
     fail-on-cache-miss: true

@@ -910,7 +979,7 @@
     . venv/bin/activate
     python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
 - name: Upload pytest_buckets
-  uses: actions/upload-artifact@v4.6.2
+  uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
   with:
     name: pytest_buckets
     path: pytest_buckets.txt

@@ -939,28 +1008,41 @@
 name: >-
   Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
 steps:
+- name: Restore apt cache
+  uses: actions/cache/restore@v4.2.4
+  with:
+    path: |
+      ${{ env.APT_CACHE_DIR }}
+      ${{ env.APT_LIST_CACHE_DIR }}
+    fail-on-cache-miss: true
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||||
- name: Install additional OS dependencies
|
- name: Install additional OS dependencies
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
run: |
|
run: |
|
||||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||||
sudo apt-get update
|
sudo apt-get update \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||||
bluez \
|
bluez \
|
||||||
ffmpeg \
|
ffmpeg \
|
||||||
libturbojpeg \
|
libturbojpeg \
|
||||||
libgammu-dev \
|
libgammu-dev \
|
||||||
libxml2-utils
|
libxml2-utils
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v5.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
id: python
|
id: python
|
||||||
uses: actions/setup-python@v6.0.0
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache/restore@v4.2.4
|
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
fail-on-cache-miss: true
|
fail-on-cache-miss: true
|
||||||
@@ -974,7 +1056,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
||||||
- name: Download pytest_buckets
|
- name: Download pytest_buckets
|
||||||
uses: actions/download-artifact@v5.0.0
|
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||||
with:
|
with:
|
||||||
name: pytest_buckets
|
name: pytest_buckets
|
||||||
- name: Compile English translations
|
- name: Compile English translations
|
||||||
@@ -1013,14 +1095,14 @@ jobs:
|
|||||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||||
- name: Upload pytest output
|
- name: Upload pytest output
|
||||||
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||||
path: pytest-*.txt
|
path: pytest-*.txt
|
||||||
overwrite: true
|
overwrite: true
|
||||||
- name: Upload coverage artifact
|
- name: Upload coverage artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true'
|
if: needs.info.outputs.skip_coverage != 'true'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||||
path: coverage.xml
|
path: coverage.xml
|
||||||
@@ -1033,7 +1115,7 @@ jobs:
|
|||||||
mv "junit.xml-tmp" "junit.xml"
|
mv "junit.xml-tmp" "junit.xml"
|
||||||
- name: Upload test results artifact
|
- name: Upload test results artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
|
name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
|
||||||
path: junit.xml
|
path: junit.xml
|
||||||
@@ -1073,28 +1155,41 @@ jobs:
|
|||||||
name: >-
|
name: >-
|
||||||
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
|
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
|
||||||
steps:
|
steps:
|
||||||
|
- name: Restore apt cache
|
||||||
|
uses: actions/cache/restore@v4.2.4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ env.APT_CACHE_DIR }}
|
||||||
|
${{ env.APT_LIST_CACHE_DIR }}
|
||||||
|
fail-on-cache-miss: true
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||||
- name: Install additional OS dependencies
|
- name: Install additional OS dependencies
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
run: |
|
run: |
|
||||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||||
sudo apt-get update
|
sudo apt-get update \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||||
bluez \
|
bluez \
|
||||||
ffmpeg \
|
ffmpeg \
|
||||||
libturbojpeg \
|
libturbojpeg \
|
||||||
libmariadb-dev-compat \
|
libmariadb-dev-compat \
|
||||||
libxml2-utils
|
libxml2-utils
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v5.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
id: python
|
id: python
|
||||||
uses: actions/setup-python@v6.0.0
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache/restore@v4.2.4
|
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
fail-on-cache-miss: true
|
fail-on-cache-miss: true
|
||||||
@@ -1153,7 +1248,7 @@ jobs:
|
|||||||
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
|
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
|
||||||
- name: Upload pytest output
|
- name: Upload pytest output
|
||||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.mariadb }}
|
steps.pytest-partial.outputs.mariadb }}
|
||||||
@@ -1161,7 +1256,7 @@ jobs:
|
|||||||
overwrite: true
|
overwrite: true
|
||||||
- name: Upload coverage artifact
|
- name: Upload coverage artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true'
|
if: needs.info.outputs.skip_coverage != 'true'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.python-version }}-${{
|
name: coverage-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.mariadb }}
|
steps.pytest-partial.outputs.mariadb }}
|
||||||
@@ -1175,7 +1270,7 @@ jobs:
|
|||||||
mv "junit.xml-tmp" "junit.xml"
|
mv "junit.xml-tmp" "junit.xml"
|
||||||
- name: Upload test results artifact
|
- name: Upload test results artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: test-results-mariadb-${{ matrix.python-version }}-${{
|
name: test-results-mariadb-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.mariadb }}
|
steps.pytest-partial.outputs.mariadb }}
|
||||||
@@ -1214,12 +1309,25 @@ jobs:
|
|||||||
name: >-
|
name: >-
|
||||||
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
|
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
|
||||||
steps:
|
steps:
|
||||||
|
- name: Restore apt cache
|
||||||
|
uses: actions/cache/restore@v4.2.4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ env.APT_CACHE_DIR }}
|
||||||
|
${{ env.APT_LIST_CACHE_DIR }}
|
||||||
|
fail-on-cache-miss: true
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||||
- name: Install additional OS dependencies
|
- name: Install additional OS dependencies
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
run: |
|
run: |
|
||||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||||
sudo apt-get update
|
sudo apt-get update \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||||
bluez \
|
bluez \
|
||||||
ffmpeg \
|
ffmpeg \
|
||||||
libturbojpeg \
|
libturbojpeg \
|
||||||
@@ -1228,16 +1336,16 @@ jobs:
|
|||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
postgresql-server-dev-14
|
postgresql-server-dev-14
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v5.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
id: python
|
id: python
|
||||||
uses: actions/setup-python@v6.0.0
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache/restore@v4.2.4
|
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
fail-on-cache-miss: true
|
fail-on-cache-miss: true
|
||||||
@@ -1297,7 +1405,7 @@ jobs:
|
|||||||
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
|
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
|
||||||
- name: Upload pytest output
|
- name: Upload pytest output
|
||||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.postgresql }}
|
steps.pytest-partial.outputs.postgresql }}
|
||||||
@@ -1305,7 +1413,7 @@ jobs:
|
|||||||
overwrite: true
|
overwrite: true
|
||||||
- name: Upload coverage artifact
|
- name: Upload coverage artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true'
|
if: needs.info.outputs.skip_coverage != 'true'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.python-version }}-${{
|
name: coverage-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.postgresql }}
|
steps.pytest-partial.outputs.postgresql }}
|
||||||
@@ -1319,7 +1427,7 @@ jobs:
|
|||||||
mv "junit.xml-tmp" "junit.xml"
|
mv "junit.xml-tmp" "junit.xml"
|
||||||
- name: Upload test results artifact
|
- name: Upload test results artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: test-results-postgres-${{ matrix.python-version }}-${{
|
name: test-results-postgres-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.postgresql }}
|
steps.pytest-partial.outputs.postgresql }}
|
||||||
@@ -1340,14 +1448,14 @@ jobs:
|
|||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v5.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
- name: Download all coverage artifacts
|
- name: Download all coverage artifacts
|
||||||
uses: actions/download-artifact@v5.0.0
|
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||||
with:
|
with:
|
||||||
pattern: coverage-*
|
pattern: coverage-*
|
||||||
- name: Upload coverage to Codecov
|
- name: Upload coverage to Codecov
|
||||||
if: needs.info.outputs.test_full_suite == 'true'
|
if: needs.info.outputs.test_full_suite == 'true'
|
||||||
uses: codecov/codecov-action@v5.5.1
|
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
flags: full-suite
|
flags: full-suite
|
||||||
@@ -1376,28 +1484,41 @@ jobs:
|
|||||||
name: >-
|
name: >-
|
||||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||||
steps:
|
steps:
|
||||||
|
- name: Restore apt cache
|
||||||
|
uses: actions/cache/restore@v4.2.4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ env.APT_CACHE_DIR }}
|
||||||
|
${{ env.APT_LIST_CACHE_DIR }}
|
||||||
|
fail-on-cache-miss: true
|
||||||
|
key: >-
|
||||||
|
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||||
- name: Install additional OS dependencies
|
- name: Install additional OS dependencies
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
run: |
|
run: |
|
||||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||||
sudo apt-get update
|
sudo apt-get update \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
|
||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
|
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
|
||||||
|
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||||
bluez \
|
bluez \
|
||||||
ffmpeg \
|
ffmpeg \
|
||||||
libturbojpeg \
|
libturbojpeg \
|
||||||
libgammu-dev \
|
libgammu-dev \
|
||||||
libxml2-utils
|
libxml2-utils
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v5.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
id: python
|
id: python
|
||||||
uses: actions/setup-python@v6.0.0
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache/restore@v4.2.4
|
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
fail-on-cache-miss: true
|
fail-on-cache-miss: true
|
||||||
@@ -1453,14 +1574,14 @@ jobs:
|
|||||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||||
- name: Upload pytest output
|
- name: Upload pytest output
|
||||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||||
path: pytest-*.txt
|
path: pytest-*.txt
|
||||||
overwrite: true
|
overwrite: true
|
||||||
- name: Upload coverage artifact
|
- name: Upload coverage artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true'
|
if: needs.info.outputs.skip_coverage != 'true'
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||||
path: coverage.xml
|
path: coverage.xml
|
||||||
@@ -1473,7 +1594,7 @@ jobs:
|
|||||||
mv "junit.xml-tmp" "junit.xml"
|
mv "junit.xml-tmp" "junit.xml"
|
||||||
- name: Upload test results artifact
|
- name: Upload test results artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||||
uses: actions/upload-artifact@v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
|
name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
|
||||||
path: junit.xml
|
path: junit.xml
|
||||||
@@ -1491,14 +1612,14 @@ jobs:
|
|||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v5.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
- name: Download all coverage artifacts
|
- name: Download all coverage artifacts
|
||||||
uses: actions/download-artifact@v5.0.0
|
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||||
with:
|
with:
|
||||||
pattern: coverage-*
|
pattern: coverage-*
|
||||||
- name: Upload coverage to Codecov
|
- name: Upload coverage to Codecov
|
||||||
if: needs.info.outputs.test_full_suite == 'false'
|
if: needs.info.outputs.test_full_suite == 'false'
|
||||||
uses: codecov/codecov-action@v5.5.1
|
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
@@ -1518,11 +1639,11 @@ jobs:
|
|||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- name: Download all coverage artifacts
|
- name: Download all coverage artifacts
|
||||||
uses: actions/download-artifact@v5.0.0
|
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||||
with:
|
with:
|
||||||
pattern: test-results-*
|
pattern: test-results-*
|
||||||
- name: Upload test results to Codecov
|
- name: Upload test results to Codecov
|
||||||
uses: codecov/test-results-action@v1
|
uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
verbose: true
|
verbose: true
|
||||||

.github/workflows/codeql.yml (6 changes, vendored)
@@ -21,14 +21,14 @@ jobs:

     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.30.1
+        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.30.1
+        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
         with:
           category: "/language:python"

@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Check if integration label was added and extract details
         id: extract
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         with:
           script: |
             // Debug: Log the event payload
@@ -113,7 +113,7 @@ jobs:
       - name: Fetch similar issues
         id: fetch_similar
         if: steps.extract.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
           CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -231,7 +231,7 @@ jobs:
       - name: Detect duplicates using AI
         id: ai_detection
         if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/ai-inference@v2.0.1
+        uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
         with:
           model: openai/gpt-4o
           system-prompt: |
@@ -280,7 +280,7 @@ jobs:
       - name: Post duplicate detection results
         id: post_results
         if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
           SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}

@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Check issue language
         id: detect_language
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           ISSUE_NUMBER: ${{ github.event.issue.number }}
           ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -57,7 +57,7 @@ jobs:
       - name: Detect language using AI
         id: ai_language_detection
         if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/ai-inference@v2.0.1
+        uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
         with:
           model: openai/gpt-4o-mini
           system-prompt: |
@@ -90,7 +90,7 @@ jobs:

       - name: Process non-English issues
         if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
           ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}

.github/workflows/lock.yml (2 changes, vendored)
@@ -10,7 +10,7 @@ jobs:
     if: github.repository_owner == 'home-assistant'
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v5.0.1
+      - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
         with:
           github-token: ${{ github.token }}
           issue-inactive-days: "30"

.github/workflows/restrict-task-creation.yml (2 changes, vendored)
@@ -12,7 +12,7 @@ jobs:
     if: github.event.issue.type.name == 'Task'
     steps:
       - name: Check if user is authorized
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         with:
           script: |
             const issueAuthor = context.payload.issue.user.login;

.github/workflows/stale.yml (6 changes, vendored)
@@ -17,7 +17,7 @@ jobs:
       # - No PRs marked as no-stale
       # - No issues (-1)
       - name: 60 days stale PRs policy
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
       # - No issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: 90 days stale issues
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
       # - No Issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: Needs more information stale issues policy
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           only-labels: "needs-more-information"

.github/workflows/translations.yml (4 changes, vendored)
@@ -19,10 +19,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}


.github/workflows/wheels.yml (32 changes, vendored)
@@ -32,11 +32,11 @@ jobs:
       architectures: ${{ steps.info.outputs.architectures }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -91,7 +91,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -135,20 +135,20 @@ jobs:
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Download env_file
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_diff

@@ -158,6 +158,7 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

+      # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         uses: home-assistant/wheels@2025.07.0
         with:
@@ -184,25 +185,25 @@ jobs:
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Download env_file
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_all_wheels

@@ -218,6 +219,7 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

+      # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         uses: home-assistant/wheels@2025.07.0
         with:

.gitignore (2 changes, vendored)
@@ -140,5 +140,5 @@ tmp_cache
 pytest_buckets.txt

 # AI tooling
-.claude
+.claude/settings.local.json


@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.1
+    rev: v0.13.0
     hooks:
       - id: ruff-check
         args:

@@ -142,6 +142,7 @@ homeassistant.components.cloud.*
 homeassistant.components.co2signal.*
 homeassistant.components.comelit.*
 homeassistant.components.command_line.*
+homeassistant.components.compit.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
 homeassistant.components.cookidoo.*
@@ -402,6 +403,7 @@ homeassistant.components.person.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
+homeassistant.components.portainer.*
 homeassistant.components.powerfox.*
 homeassistant.components.powerwall.*
 homeassistant.components.private_ble_device.*

CODEOWNERS (31 changes, generated)
@@ -107,8 +107,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/ambient_station/ @bachya
 /tests/components/ambient_station/ @bachya
 /homeassistant/components/amcrest/ @flacjacket
-/homeassistant/components/analytics/ @home-assistant/core @ludeeus
-/tests/components/analytics/ @home-assistant/core @ludeeus
+/homeassistant/components/analytics/ @home-assistant/core
+/tests/components/analytics/ @home-assistant/core
 /homeassistant/components/analytics_insights/ @joostlek
 /tests/components/analytics_insights/ @joostlek
 /homeassistant/components/android_ip_webcam/ @engrbm87
@@ -292,6 +292,8 @@ build.json @home-assistant/supervisor
 /tests/components/command_line/ @gjohansson-ST
 /homeassistant/components/compensation/ @Petro31
 /tests/components/compensation/ @Petro31
+/homeassistant/components/compit/ @Przemko92
+/tests/components/compit/ @Przemko92
 /homeassistant/components/config/ @home-assistant/core
 /tests/components/config/ @home-assistant/core
 /homeassistant/components/configurator/ @home-assistant/core
@@ -442,8 +444,6 @@ build.json @home-assistant/supervisor
 /tests/components/energyzero/ @klaasnicolaas
 /homeassistant/components/enigma2/ @autinerd
 /tests/components/enigma2/ @autinerd
-/homeassistant/components/enocean/ @bdurrer
-/tests/components/enocean/ @bdurrer
 /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
@@ -772,6 +772,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/iqvia/ @bachya
 /tests/components/iqvia/ @bachya
 /homeassistant/components/irish_rail_transport/ @ttroy50
+/homeassistant/components/irm_kmi/ @jdejaegh
+/tests/components/irm_kmi/ @jdejaegh
 /homeassistant/components/iron_os/ @tr4nt0r
 /tests/components/iron_os/ @tr4nt0r
 /homeassistant/components/isal/ @bdraco
@@ -970,6 +972,8 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
 /homeassistant/components/mobile_app/ @home-assistant/core
 /tests/components/mobile_app/ @home-assistant/core
+/homeassistant/components/modbus/ @janiversen
+/tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
@@ -1017,7 +1021,8 @@ build.json @home-assistant/supervisor
 /tests/components/nanoleaf/ @milanmeu @joostlek
 /homeassistant/components/nasweb/ @nasWebio
 /tests/components/nasweb/ @nasWebio
-/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
+/homeassistant/components/nederlandse_spoorwegen/ @YarmoM @heindrichpaul
+/tests/components/nederlandse_spoorwegen/ @YarmoM @heindrichpaul
 /homeassistant/components/ness_alarm/ @nickw444
 /tests/components/ness_alarm/ @nickw444
 /homeassistant/components/nest/ @allenporter
@@ -1191,6 +1196,8 @@ build.json @home-assistant/supervisor
 /tests/components/pooldose/ @lmaertin
 /homeassistant/components/poolsense/ @haemishkyd
 /tests/components/poolsense/ @haemishkyd
+/homeassistant/components/portainer/ @erwindouna
+/tests/components/portainer/ @erwindouna
 /homeassistant/components/powerfox/ @klaasnicolaas
 /tests/components/powerfox/ @klaasnicolaas
 /homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
@@ -1347,6 +1354,8 @@ build.json @home-assistant/supervisor
 /tests/components/samsungtv/ @chemelli74 @epenet
 /homeassistant/components/sanix/ @tomaszsluszniak
 /tests/components/sanix/ @tomaszsluszniak
+/homeassistant/components/satel_integra/ @Tommatheussen
+/tests/components/satel_integra/ @Tommatheussen
 /homeassistant/components/scene/ @home-assistant/core
 /tests/components/scene/ @home-assistant/core
 /homeassistant/components/schedule/ @home-assistant/core
@@ -1528,8 +1537,8 @@ build.json @home-assistant/supervisor
 /tests/components/switchbee/ @jafar-atili
 /homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
 /tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
-/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
-/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
+/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
+/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
 /homeassistant/components/switcher_kis/ @thecode @YogevBokobza
 /tests/components/switcher_kis/ @thecode @YogevBokobza
 /homeassistant/components/switchmate/ @danielhiversen @qiz-li
@@ -1674,6 +1683,8 @@ build.json @home-assistant/supervisor
 /tests/components/uptime_kuma/ @tr4nt0r
 /homeassistant/components/uptimerobot/ @ludeeus @chemelli74
 /tests/components/uptimerobot/ @ludeeus @chemelli74
+/homeassistant/components/usage_prediction/ @home-assistant/core
+/tests/components/usage_prediction/ @home-assistant/core
 /homeassistant/components/usb/ @bdraco
 /tests/components/usb/ @bdraco
 /homeassistant/components/usgs_earthquakes_feed/ @exxamalte
@@ -1703,6 +1714,8 @@ build.json @home-assistant/supervisor
 /tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
 /homeassistant/components/vicare/ @CFenner
 /tests/components/vicare/ @CFenner
+/homeassistant/components/victron_remote_monitoring/ @AndyTempel
+/tests/components/victron_remote_monitoring/ @AndyTempel
 /homeassistant/components/vilfo/ @ManneW
 /tests/components/vilfo/ @ManneW
 /homeassistant/components/vivotek/ @HarlemSquirrel
@@ -1718,8 +1731,8 @@ build.json @home-assistant/supervisor
 /tests/components/volumio/ @OnFreund
 /homeassistant/components/volvo/ @thomasddn
 /tests/components/volvo/ @thomasddn
-/homeassistant/components/volvooncall/ @molobrakos
-/tests/components/volvooncall/ @molobrakos
+/homeassistant/components/volvooncall/ @molobrakos @svrooij
+/tests/components/volvooncall/ @molobrakos @svrooij
 /homeassistant/components/wake_on_lan/ @ntilley905
 /tests/components/wake_on_lan/ @ntilley905
 /homeassistant/components/wake_word/ @home-assistant/core @synesthesiam

@@ -6,7 +6,6 @@
     "google_assistant_sdk",
     "google_cloud",
     "google_drive",
-    "google_gemini",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",

@@ -2,21 +2,23 @@

 from __future__ import annotations

+import asyncio
 import logging

 from accuweather import AccuWeather

 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
+from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
+from .const import DOMAIN
 from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -28,7 +30,6 @@ PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
-    name: str = entry.data[CONF_NAME]

     location_key = entry.unique_id

@@ -41,26 +42,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
         hass,
         entry,
         accuweather,
-        name,
-        "observation",
-        UPDATE_INTERVAL_OBSERVATION,
     )

     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
         entry,
         accuweather,
-        name,
-        "daily forecast",
-        UPDATE_INTERVAL_DAILY_FORECAST,
+    )
+    coordinator_hourly_forecast = AccuWeatherHourlyForecastDataUpdateCoordinator(
+        hass,
+        entry,
+        accuweather,
     )

-    await coordinator_observation.async_config_entry_first_refresh()
-    await coordinator_daily_forecast.async_config_entry_first_refresh()
+    await asyncio.gather(
+        coordinator_observation.async_config_entry_first_refresh(),
+        coordinator_daily_forecast.async_config_entry_first_refresh(),
+        coordinator_hourly_forecast.async_config_entry_first_refresh(),
+    )

     entry.runtime_data = AccuWeatherData(
         coordinator_observation=coordinator_observation,
         coordinator_daily_forecast=coordinator_daily_forecast,
+        coordinator_hourly_forecast=coordinator_hourly_forecast,
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
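Note on the setup change above: the three coordinators' first refreshes are independent API calls, so running them through asyncio.gather cuts entry setup latency to roughly the slowest single request instead of the sum of all three. A minimal, self-contained sketch of the same pattern (the coordinator names here are stand-ins, not the integration's API):

import asyncio


async def first_refresh(name: str) -> str:
    """Stand-in for a coordinator's async_config_entry_first_refresh()."""
    await asyncio.sleep(0.1)  # simulate one API round trip
    return name


async def main() -> None:
    # Run sequentially this would take ~0.3 s; gathered, ~0.1 s.
    done = await asyncio.gather(
        first_refresh("observation"),
        first_refresh("daily forecast"),
        first_refresh("hourly forecast"),
    )
    print(done)


asyncio.run(main())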
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from asyncio import timeout
+from collections.abc import Mapping
 from typing import Any

 from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
@@ -22,6 +23,8 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
     """Config flow for AccuWeather."""

     VERSION = 1
+    _latitude: float | None = None
+    _longitude: float | None = None

     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -50,6 +53,7 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
             await self.async_set_unique_id(
                 accuweather.location_key, raise_on_progress=False
             )
+            self._abort_if_unique_id_configured()

             return self.async_create_entry(
                 title=user_input[CONF_NAME], data=user_input
@@ -73,3 +77,46 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
             ),
             errors=errors,
         )
+
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle configuration by re-auth."""
+        self._latitude = entry_data[CONF_LATITUDE]
+        self._longitude = entry_data[CONF_LONGITUDE]
+
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Dialog that informs the user that reauth is required."""
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            websession = async_get_clientsession(self.hass)
+            try:
+                async with timeout(10):
+                    accuweather = AccuWeather(
+                        user_input[CONF_API_KEY],
+                        websession,
+                        latitude=self._latitude,
+                        longitude=self._longitude,
+                    )
+                    await accuweather.async_get_location()
+            except (ApiError, ClientConnectorError, TimeoutError, ClientError):
+                errors["base"] = "cannot_connect"
+            except InvalidApiKeyError:
+                errors["base"] = "invalid_api_key"
+            except RequestsExceededError:
+                errors["base"] = "requests_exceeded"
+            else:
+                return self.async_update_reload_and_abort(
+                    self._get_reauth_entry(), data_updates=user_input
+                )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
+            errors=errors,
+        )
@@ -69,5 +69,6 @@ POLLEN_CATEGORY_MAP = {
     4: "very_high",
     5: "extreme",
 }
-UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
+UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from asyncio import timeout
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from datetime import timedelta
 import logging
@@ -12,7 +13,9 @@ from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExcee
 from aiohttp.client_exceptions import ClientConnectorError

 from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
     DataUpdateCoordinator,
@@ -20,9 +23,15 @@ from homeassistant.helpers.update_coordinator import (
     UpdateFailed,
 )

-from .const import DOMAIN, MANUFACTURER
+from .const import (
+    DOMAIN,
+    MANUFACTURER,
+    UPDATE_INTERVAL_DAILY_FORECAST,
+    UPDATE_INTERVAL_HOURLY_FORECAST,
+    UPDATE_INTERVAL_OBSERVATION,
+)

-EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)
+EXCEPTIONS = (ApiError, ClientConnectorError, RequestsExceededError)

 _LOGGER = logging.getLogger(__name__)

@@ -33,6 +42,7 @@ class AccuWeatherData:

     coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
     coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+    coordinator_hourly_forecast: AccuWeatherHourlyForecastDataUpdateCoordinator


 type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
@@ -43,18 +53,18 @@ class AccuWeatherObservationDataUpdateCoordinator(
 ):
     """Class to manage fetching AccuWeather data API."""

+    config_entry: AccuWeatherConfigEntry
+
     def __init__(
         self,
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
-        coordinator_type: str,
-        update_interval: timedelta,
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -65,8 +75,8 @@ class AccuWeatherObservationDataUpdateCoordinator(
             hass,
             _LOGGER,
             config_entry=config_entry,
-            name=f"{name} ({coordinator_type})",
-            update_interval=update_interval,
+            name=f"{name} (observation)",
+            update_interval=UPDATE_INTERVAL_OBSERVATION,
         )

     async def _async_update_data(self) -> dict[str, Any]:
@@ -80,29 +90,39 @@ class AccuWeatherObservationDataUpdateCoordinator(
                 translation_key="current_conditions_update_error",
                 translation_placeholders={"error": repr(error)},
             ) from error
+        except InvalidApiKeyError as err:
+            raise ConfigEntryAuthFailed(
+                translation_domain=DOMAIN,
+                translation_key="auth_error",
+                translation_placeholders={"entry": self.config_entry.title},
+            ) from err

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

         return result


-class AccuWeatherDailyForecastDataUpdateCoordinator(
+class AccuWeatherForecastDataUpdateCoordinator(
     TimestampDataUpdateCoordinator[list[dict[str, Any]]]
 ):
-    """Class to manage fetching AccuWeather data API."""
+    """Base class for AccuWeather forecast."""

+    config_entry: AccuWeatherConfigEntry
+
     def __init__(
         self,
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
         coordinator_type: str,
         update_interval: timedelta,
+        fetch_method: Callable[..., Awaitable[list[dict[str, Any]]]],
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        self._fetch_method = fetch_method
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -118,24 +138,71 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         )

     async def _async_update_data(self) -> list[dict[str, Any]]:
-        """Update data via library."""
+        """Update forecast data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast(
-                    language=self.hass.config.language
-                )
+                result = await self._fetch_method(language=self.hass.config.language)
         except EXCEPTIONS as error:
             raise UpdateFailed(
                 translation_domain=DOMAIN,
                 translation_key="forecast_update_error",
                 translation_placeholders={"error": repr(error)},
             ) from error
+        except InvalidApiKeyError as err:
+            raise ConfigEntryAuthFailed(
+                translation_domain=DOMAIN,
+                translation_key="auth_error",
+                translation_placeholders={"entry": self.config_entry.title},
+            ) from err

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

         return result


+class AccuWeatherDailyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for daily forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "daily forecast",
+            UPDATE_INTERVAL_DAILY_FORECAST,
+            fetch_method=accuweather.async_get_daily_forecast,
+        )
+
+
+class AccuWeatherHourlyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for hourly forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "hourly forecast",
+            UPDATE_INTERVAL_HOURLY_FORECAST,
+            fetch_method=accuweather.async_get_hourly_forecast,
+        )
+
+
 def _get_device_info(location_key: str, name: str) -> DeviceInfo:
     """Get device info."""
     return DeviceInfo(
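The coordinator refactor above is a strategy-injection pattern: both forecast coordinators share one base class and differ only in the coroutine passed as fetch_method. A toy sketch of the same idea outside Home Assistant (all names below are illustrative, not the integration's API):

import asyncio
from collections.abc import Awaitable, Callable


class FakeClient:
    """Stand-in for the accuweather API client."""

    async def get_daily(self, language: str) -> list[dict]:
        return [{"kind": "daily", "language": language}]

    async def get_hourly(self, language: str) -> list[dict]:
        return [{"kind": "hourly", "language": language}]


class ForecastCoordinator:
    """Base class: the fetch strategy is injected, not hard-coded."""

    def __init__(self, fetch_method: Callable[..., Awaitable[list[dict]]]) -> None:
        self._fetch_method = fetch_method

    async def update(self) -> list[dict]:
        return await self._fetch_method(language="en")


client = FakeClient()
daily = ForecastCoordinator(client.get_daily)    # bound method as strategy
hourly = ForecastCoordinator(client.get_hourly)
print(asyncio.run(daily.update()), asyncio.run(hourly.update()))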
@@ -7,6 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.1"],
-  "single_config_entry": true
+  "requirements": ["accuweather==4.2.1"]
 }
@@ -7,6 +7,17 @@
           "api_key": "[%key:common::config_flow::data::api_key%]",
           "latitude": "[%key:common::config_flow::data::latitude%]",
           "longitude": "[%key:common::config_flow::data::longitude%]"
+        },
+        "data_description": {
+          "api_key": "API key generated in the AccuWeather APIs portal."
+        }
+      },
+      "reauth_confirm": {
+        "data": {
+          "api_key": "[%key:common::config_flow::data::api_key%]"
+        },
+        "data_description": {
+          "api_key": "[%key:component::accuweather::config::step::user::data_description::api_key%]"
         }
       }
     },
@@ -17,6 +28,10 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
       "requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
   "entity": {
@@ -236,6 +251,9 @@
     }
   },
   "exceptions": {
+    "auth_error": {
+      "message": "Authentication failed for {entry}, please update your API key"
+    },
     "current_conditions_update_error": {
       "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
     },
@@ -45,6 +45,7 @@ from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -64,6 +65,7 @@ class AccuWeatherEntity(
     CoordinatorWeatherEntity[
         AccuWeatherObservationDataUpdateCoordinator,
         AccuWeatherDailyForecastDataUpdateCoordinator,
+        AccuWeatherHourlyForecastDataUpdateCoordinator,
     ]
 ):
     """Define an AccuWeather entity."""
@@ -76,6 +78,7 @@ class AccuWeatherEntity(
         super().__init__(
             observation_coordinator=accuweather_data.coordinator_observation,
             daily_coordinator=accuweather_data.coordinator_daily_forecast,
+            hourly_coordinator=accuweather_data.coordinator_hourly_forecast,
         )

         self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
@@ -86,10 +89,13 @@ class AccuWeatherEntity(
         self._attr_unique_id = accuweather_data.coordinator_observation.location_key
         self._attr_attribution = ATTRIBUTION
         self._attr_device_info = accuweather_data.coordinator_observation.device_info
-        self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
+        self._attr_supported_features = (
+            WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
+        )

         self.observation_coordinator = accuweather_data.coordinator_observation
         self.daily_coordinator = accuweather_data.coordinator_daily_forecast
+        self.hourly_coordinator = accuweather_data.coordinator_hourly_forecast

     @property
     def condition(self) -> str | None:
@@ -207,3 +213,32 @@ class AccuWeatherEntity(
             }
             for item in self.daily_coordinator.data
         ]
+
+    @callback
+    def _async_forecast_hourly(self) -> list[Forecast] | None:
+        """Return the hourly forecast in native units."""
+        return [
+            {
+                ATTR_FORECAST_TIME: utc_from_timestamp(
+                    item["EpochDateTime"]
+                ).isoformat(),
+                ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCover"],
+                ATTR_FORECAST_HUMIDITY: item["RelativeHumidity"],
+                ATTR_FORECAST_NATIVE_TEMP: item["Temperature"][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperature"][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_NATIVE_PRECIPITATION: item["TotalLiquid"][ATTR_VALUE],
+                ATTR_FORECAST_PRECIPITATION_PROBABILITY: item[
+                    "PrecipitationProbability"
+                ],
+                ATTR_FORECAST_NATIVE_WIND_SPEED: item["Wind"][ATTR_SPEED][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: item["WindGust"][ATTR_SPEED][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_UV_INDEX: item["UVIndex"],
+                ATTR_FORECAST_WIND_BEARING: item["Wind"][ATTR_DIRECTION]["Degrees"],
+                ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["WeatherIcon"]),
+            }
+            for item in self.hourly_coordinator.data
+        ]
@@ -3,10 +3,8 @@
 import logging
 from typing import Any

-from aiohttp import web
 import voluptuous as vol

-from homeassistant.components.http import KEY_HASS, HomeAssistantView
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
 from homeassistant.core import (
@@ -28,7 +26,6 @@ from .const import (
     ATTR_STRUCTURE,
     ATTR_TASK_NAME,
     DATA_COMPONENT,
-    DATA_IMAGES,
     DATA_PREFERENCES,
     DOMAIN,
     SERVICE_GENERATE_DATA,
@@ -42,7 +39,6 @@ from .task import (
     GenDataTaskResult,
     GenImageTask,
     GenImageTaskResult,
-    ImageData,
     async_generate_data,
     async_generate_image,
 )
@@ -55,7 +51,6 @@ __all__ = [
     "GenDataTaskResult",
     "GenImageTask",
     "GenImageTaskResult",
-    "ImageData",
     "async_generate_data",
     "async_generate_image",
     "async_setup",
@@ -94,10 +89,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
     hass.data[DATA_COMPONENT] = entity_component
     hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
-    hass.data[DATA_IMAGES] = {}
     await hass.data[DATA_PREFERENCES].async_load()
     async_setup_http(hass)
-    hass.http.register_view(ImageView)
     hass.services.async_register(
         DOMAIN,
         SERVICE_GENERATE_DATA,
@@ -209,28 +202,3 @@ class AITaskPreferences:
     def as_dict(self) -> dict[str, str | None]:
         """Get the current preferences."""
         return {key: getattr(self, key) for key in self.KEYS}
-
-
-class ImageView(HomeAssistantView):
-    """View to generated images."""
-
-    url = f"/api/{DOMAIN}/images/{{filename}}"
-    name = f"api:{DOMAIN}/images"
-
-    async def get(
-        self,
-        request: web.Request,
-        filename: str,
-    ) -> web.Response:
-        """Serve image."""
-        hass = request.app[KEY_HASS]
-        image_storage = hass.data[DATA_IMAGES]
-        image_data = image_storage.get(filename)
-
-        if image_data is None:
-            raise web.HTTPNotFound
-
-        return web.Response(
-            body=image_data.data,
-            content_type=image_data.mime_type,
-        )
@@ -8,19 +8,19 @@ from typing import TYPE_CHECKING, Final
 from homeassistant.util.hass_dict import HassKey

 if TYPE_CHECKING:
+    from homeassistant.components.media_source import local_source
    from homeassistant.helpers.entity_component import EntityComponent

     from . import AITaskPreferences
     from .entity import AITaskEntity
-    from .task import ImageData

 DOMAIN = "ai_task"
 DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
 DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
-DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")
+DATA_MEDIA_SOURCE: HassKey[local_source.LocalSource] = HassKey(f"{DOMAIN}_media_source")

+IMAGE_DIR: Final = "image"
 IMAGE_EXPIRY_TIME = 60 * 60  # 1 hour
-MAX_IMAGES = 20

 SERVICE_GENERATE_DATA = "generate_data"
 SERVICE_GENERATE_IMAGE = "generate_image"
@@ -1,7 +1,7 @@
 {
   "domain": "ai_task",
   "name": "AI Task",
-  "after_dependencies": ["camera", "http"],
+  "after_dependencies": ["camera"],
   "codeowners": ["@home-assistant/core"],
   "dependencies": ["conversation", "media_source"],
   "documentation": "https://www.home-assistant.io/integrations/ai_task",
@@ -2,89 +2,31 @@

 from __future__ import annotations

-from datetime import timedelta
-import logging
+from pathlib import Path

-from homeassistant.components.http.auth import async_sign_path
-from homeassistant.components.media_player import BrowseError, MediaClass
-from homeassistant.components.media_source import (
-    BrowseMediaSource,
-    MediaSource,
-    MediaSourceItem,
-    PlayMedia,
-    Unresolvable,
-)
+from homeassistant.components.media_source import MediaSource, local_source
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError

-from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME
-
-_LOGGER = logging.getLogger(__name__)
+from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR


-async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
-    """Set up image media source."""
-    _LOGGER.debug("Setting up image media source")
-    return ImageMediaSource(hass)
-
-
-class ImageMediaSource(MediaSource):
-    """Provide images as media sources."""
-
-    name: str = "AI Generated Images"
-
-    def __init__(self, hass: HomeAssistant) -> None:
-        """Initialize ImageMediaSource."""
-        super().__init__(DOMAIN)
-        self.hass = hass
-
-    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
-        """Resolve media to a url."""
-        image_storage = self.hass.data[DATA_IMAGES]
-        image = image_storage.get(item.identifier)
-
-        if image is None:
-            raise Unresolvable(f"Could not resolve media item: {item.identifier}")
-
-        return PlayMedia(
-            async_sign_path(
-                self.hass,
-                f"/api/{DOMAIN}/images/{item.identifier}",
-                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
-            ),
-            image.mime_type,
-        )
-
-    async def async_browse_media(
-        self,
-        item: MediaSourceItem,
-    ) -> BrowseMediaSource:
-        """Return media."""
-        if item.identifier:
-            raise BrowseError("Unknown item")
-
-        image_storage = self.hass.data[DATA_IMAGES]
-
-        children = [
-            BrowseMediaSource(
-                domain=DOMAIN,
-                identifier=filename,
-                media_class=MediaClass.IMAGE,
-                media_content_type=image.mime_type,
-                title=image.title or filename,
-                can_play=True,
-                can_expand=False,
-            )
-            for filename, image in image_storage.items()
-        ]
-
-        return BrowseMediaSource(
-            domain=DOMAIN,
-            identifier=None,
-            media_class=MediaClass.APP,
-            media_content_type="",
-            title="AI Generated Images",
-            can_play=False,
-            can_expand=True,
-            children_media_class=MediaClass.IMAGE,
-            children=children,
-        )
+async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
+    """Set up local media source."""
+    media_dirs = list(hass.config.media_dirs.values())
+
+    if not media_dirs:
+        raise HomeAssistantError(
+            "AI Task media source requires at least one media directory configured"
+        )
+
+    media_dir = Path(media_dirs[0]) / DOMAIN / IMAGE_DIR
+
+    hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
+        hass,
+        DOMAIN,
+        "AI Generated Images",
+        {IMAGE_DIR: str(media_dir)},
+        f"/{DOMAIN}",
+    )
+    return source
@@ -4,7 +4,7 @@ from __future__ import annotations

 from dataclasses import dataclass
 from datetime import datetime, timedelta
-from functools import partial
+import io
 import mimetypes
 from pathlib import Path
 import tempfile
@@ -12,35 +12,33 @@ from typing import Any

 import voluptuous as vol

-from homeassistant.components import camera, conversation, media_source
+from homeassistant.components import camera, conversation, image, media_source
 from homeassistant.components.http.auth import async_sign_path
 from homeassistant.core import HomeAssistant, ServiceResponse, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import llm
 from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
-from homeassistant.helpers.event import async_call_later
-from homeassistant.helpers.network import get_url
 from homeassistant.util import RE_SANITIZE_FILENAME, slugify

 from .const import (
     DATA_COMPONENT,
-    DATA_IMAGES,
+    DATA_MEDIA_SOURCE,
     DATA_PREFERENCES,
     DOMAIN,
+    IMAGE_DIR,
     IMAGE_EXPIRY_TIME,
-    MAX_IMAGES,
     AITaskEntityFeature,
 )


-def _save_camera_snapshot(image: camera.Image) -> Path:
+def _save_camera_snapshot(image_data: camera.Image | image.Image) -> Path:
     """Save camera snapshot to temp file."""
     with tempfile.NamedTemporaryFile(
         mode="wb",
-        suffix=mimetypes.guess_extension(image.content_type, False),
+        suffix=mimetypes.guess_extension(image_data.content_type, False),
         delete=False,
     ) as temp_file:
-        temp_file.write(image.content)
+        temp_file.write(image_data.content)
         return Path(temp_file.name)


@@ -56,26 +54,31 @@ async def _resolve_attachments(
     for attachment in attachments or []:
         media_content_id = attachment["media_content_id"]

-        # Special case for camera media sources
-        if media_content_id.startswith("media-source://camera/"):
-            # Extract entity_id from the media content ID
-            entity_id = media_content_id.removeprefix("media-source://camera/")
+        # Special case for certain media sources
+        for integration in camera, image:
+            media_source_prefix = f"media-source://{integration.DOMAIN}/"
+            if not media_content_id.startswith(media_source_prefix):
+                continue

-            # Get snapshot from camera
-            image = await camera.async_get_image(hass, entity_id)
+            # Extract entity_id from the media content ID
+            entity_id = media_content_id.removeprefix(media_source_prefix)
+
+            # Get snapshot from entity
+            image_data = await integration.async_get_image(hass, entity_id)

             temp_filename = await hass.async_add_executor_job(
-                _save_camera_snapshot, image
+                _save_camera_snapshot, image_data
             )
             created_files.append(temp_filename)

             resolved_attachments.append(
                 conversation.Attachment(
                     media_content_id=media_content_id,
-                    mime_type=image.content_type,
+                    mime_type=image_data.content_type,
                     path=temp_filename,
                 )
             )
+            break
         else:
             # Handle regular media sources
             media = await media_source.async_resolve_media(hass, media_content_id, None)
@@ -158,24 +161,6 @@ async def async_generate_data(
     )


-def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
-    """Remove old images to keep the storage size under the limit."""
-    if num_to_remove <= 0:
-        return
-
-    if num_to_remove >= len(image_storage):
-        image_storage.clear()
-        return
-
-    sorted_images = sorted(
-        image_storage.items(),
-        key=lambda item: item[1].timestamp,
-    )
-
-    for filename, _ in sorted_images[:num_to_remove]:
-        image_storage.pop(filename, None)
-
-
 async def async_generate_image(
     hass: HomeAssistant,
     *,
@@ -225,36 +210,34 @@ async def async_generate_image(
     if service_result.get("revised_prompt") is None:
         service_result["revised_prompt"] = instructions

-    image_storage = hass.data[DATA_IMAGES]
-
-    if len(image_storage) + 1 > MAX_IMAGES:
-        _cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
+    source = hass.data[DATA_MEDIA_SOURCE]

     current_time = datetime.now()
     ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
     sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
-    filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"

-    image_storage[filename] = ImageData(
-        data=image_data,
-        timestamp=int(current_time.timestamp()),
-        mime_type=task_result.mime_type,
-        title=service_result["revised_prompt"],
+    image_file = ImageData(
+        filename=f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}",
+        file=io.BytesIO(image_data),
+        content_type=task_result.mime_type,
     )

-    def _purge_image(filename: str, now: datetime) -> None:
-        """Remove image from storage."""
-        image_storage.pop(filename, None)
-
-    if IMAGE_EXPIRY_TIME > 0:
-        async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
+    target_folder = media_source.MediaSourceItem.from_uri(
+        hass, f"media-source://{DOMAIN}/{IMAGE_DIR}", None
+    )

-    service_result["url"] = get_url(hass) + async_sign_path(
+    service_result["media_source_id"] = await source.async_upload_media(
+        target_folder, image_file
+    )
+
+    item = media_source.MediaSourceItem.from_uri(
+        hass, service_result["media_source_id"], None
+    )
+    service_result["url"] = async_sign_path(
         hass,
-        f"/api/{DOMAIN}/images/{filename}",
-        timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
+        (await source.async_resolve_media(item)).url,
+        timedelta(seconds=IMAGE_EXPIRY_TIME),
     )
-    service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

     return service_result

@@ -359,20 +342,8 @@ class GenImageTaskResult:

 @dataclass(slots=True)
 class ImageData:
-    """Image data for stored generated images."""
+    """Implementation of media_source.local_source.UploadedFile protocol."""

-    data: bytes
-    """Raw image data."""
-
-    timestamp: int
-    """Timestamp when the image was generated, as a Unix timestamp."""
-
-    mime_type: str
-    """MIME type of the image."""
-
-    title: str
-    """Title of the image, usually the prompt used to generate it."""
-
-    def __str__(self) -> str:
-        """Return image data as a string."""
-        return f"<ImageData {self.title}: {id(self)}>"
+    filename: str
+    file: io.IOBase
+    content_type: str
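The rewritten ImageData above relies on duck typing: anything exposing filename, file, and content_type satisfies the uploaded-file shape the local media source consumes, so a generated image never touches disk before upload. A small sketch of constructing one (the byte payload and filename are illustrative):

import io
from dataclasses import dataclass


@dataclass(slots=True)
class ImageData:
    """Same filename/file/content_type shape as the class in the diff."""

    filename: str
    file: io.IOBase
    content_type: str


image_file = ImageData(
    filename="2025-01-01_120000_example_task.png",
    file=io.BytesIO(b"\x89PNG\r\n\x1a\n"),  # stand-in for real image bytes
    content_type="image/png",
)
assert image_file.file.read(4) == b"\x89PNG"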
@@ -3,7 +3,6 @@
 from __future__ import annotations

 from genie_partner_sdk.client import AladdinConnectClient
-from genie_partner_sdk.model import GarageDoor

 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
@@ -36,22 +35,7 @@ async def async_setup_entry(
         api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
     )

-    sdk_doors = await client.get_doors()
-
-    # Convert SDK GarageDoor objects to integration GarageDoor objects
-    doors = [
-        GarageDoor(
-            {
-                "device_id": door.device_id,
-                "door_number": door.door_number,
-                "name": door.name,
-                "status": door.status,
-                "link_status": door.link_status,
-                "battery_level": door.battery_level,
-            }
-        )
-        for door in sdk_doors
-    ]
+    doors = await client.get_doors()

     entry.runtime_data = {
         door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
@@ -41,4 +41,10 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
     async def _async_update_data(self) -> GarageDoor:
         """Fetch data from the Aladdin Connect API."""
         await self.client.update_door(self.data.device_id, self.data.door_number)
+        self.data.status = self.client.get_door_status(
+            self.data.device_id, self.data.door_number
+        )
+        self.data.battery_level = self.client.get_battery_status(
+            self.data.device_id, self.data.door_number
+        )
         return self.data
@@ -49,7 +49,9 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
     @property
     def is_closed(self) -> bool | None:
         """Update is closed attribute."""
-        return self.coordinator.data.status == "closed"
+        if (status := self.coordinator.data.status) is None:
+            return None
+        return status == "closed"

     @property
     def is_closing(self) -> bool | None:
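The is_closed change above matters because cover entities treat True/False/None as closed/open/unknown; the old one-liner evaluated None == "closed" to False, so an unreachable or not-yet-polled door was reported as open. A plain-Python sketch of the tri-state guard:

def is_closed(status: str | None) -> bool | None:
    """Map a raw door status onto the closed/open/unknown tri-state."""
    if (current := status) is None:
        return None  # no status yet -> unknown, not "open"
    return current == "closed"


assert is_closed(None) is None      # previously this case returned False
assert is_closed("closed") is True
assert is_closed("open") is False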
@@ -4,8 +4,13 @@
   "codeowners": ["@swcloudgenie"],
   "config_flow": true,
   "dependencies": ["application_credentials"],
+  "dhcp": [
+    {
+      "hostname": "gdocntl-*"
+    }
+  ],
   "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
-  "requirements": ["genie-partner-sdk==1.0.10"]
+  "requirements": ["genie-partner-sdk==1.0.11"]
 }
@@ -7,6 +7,9 @@
       "reauth_confirm": {
         "title": "[%key:common::config_flow::title::reauth%]",
         "description": "Aladdin Connect needs to re-authenticate your account"
+      },
+      "oauth_discovery": {
+        "description": "Home Assistant has found an Aladdin Connect device on your network. Press **Submit** to continue setting up Aladdin Connect."
       }
     },
     "abort": {
@@ -61,7 +61,7 @@ ALARM_SERVICE_SCHEMA: Final = make_entity_service_schema(


 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
-    """Track states and offer events for sensors."""
+    """Set up the alarm control panel component."""
     component = hass.data[DATA_COMPONENT] = EntityComponent[AlarmControlPanelEntity](
         _LOGGER, DOMAIN, hass, SCAN_INTERVAL
     )
@@ -107,7 +107,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):

         if user_input is not None:
             try:
-                await validate_input(self.hass, {**reauth_entry.data, **user_input})
+                data = await validate_input(
+                    self.hass, {**reauth_entry.data, **user_input}
+                )
             except CannotConnect:
                 errors["base"] = "cannot_connect"
             except (CannotAuthenticate, TypeError):
@@ -119,8 +121,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 reauth_entry,
                 data={
                     CONF_USERNAME: entry_data[CONF_USERNAME],
-                    CONF_PASSWORD: entry_data[CONF_PASSWORD],
+                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                     CONF_CODE: user_input[CONF_CODE],
+                    CONF_LOGIN_DATA: data,
                 },
             )
@@ -41,7 +41,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
         if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE:
             return []

-        call_ids = await async_extract_entity_ids(hass, call)
+        call_ids = await async_extract_entity_ids(call)
         entity_ids = []
         for entity_id in hass.data[DATA_AMCREST][CAMERAS]:
             if entity_id not in call_ids:
@@ -2,7 +2,7 @@
   "domain": "analytics",
   "name": "Analytics",
   "after_dependencies": ["energy", "hassio", "recorder"],
-  "codeowners": ["@home-assistant/core", "@ludeeus"],
+  "codeowners": ["@home-assistant/core"],
   "dependencies": ["api", "websocket_api", "http"],
   "documentation": "https://www.home-assistant.io/integrations/analytics",
   "integration_type": "system",
@@ -33,9 +33,11 @@ from homeassistant.const import (
 )
 from homeassistant.core import Event, HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.storage import STORAGE_DIR
+from homeassistant.helpers.typing import ConfigType

 from .const import (
     CONF_ADB_SERVER_IP,
@@ -46,10 +48,12 @@ from .const import (
     DEFAULT_ADB_SERVER_PORT,
     DEVICE_ANDROIDTV,
     DEVICE_FIRETV,
+    DOMAIN,
     PROP_ETHMAC,
     PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )
+from .services import async_setup_services

 ADB_PYTHON_EXCEPTIONS: tuple = (
     AdbTimeoutError,
@@ -63,6 +67,8 @@ ADB_PYTHON_EXCEPTIONS: tuple = (
 )
 ADB_TCP_EXCEPTIONS: tuple = (ConnectionResetError, RuntimeError)

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
 PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
 RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

@@ -188,6 +194,12 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the Android TV / Fire TV integration."""
+    async_setup_services(hass)
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool:
     """Set up Android Debug Bridge platform."""

@@ -8,7 +8,6 @@ import logging

 from androidtv.constants import APPS, KEYS
 from androidtv.setup_async import AndroidTVAsync, FireTVAsync
-import voluptuous as vol

 from homeassistant.components import persistent_notification
 from homeassistant.components.media_player import (
@@ -17,9 +16,7 @@ from homeassistant.components.media_player import (
     MediaPlayerEntityFeature,
     MediaPlayerState,
 )
-from homeassistant.const import ATTR_COMMAND
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import config_validation as cv, entity_platform
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util.dt import utcnow
@@ -39,19 +36,10 @@ from .const import (
     SIGNAL_CONFIG_ENTITY,
 )
 from .entity import AndroidTVEntity, adb_decorator
+from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT

 _LOGGER = logging.getLogger(__name__)

-ATTR_ADB_RESPONSE = "adb_response"
-ATTR_DEVICE_PATH = "device_path"
-ATTR_HDMI_INPUT = "hdmi_input"
-ATTR_LOCAL_PATH = "local_path"
-
-SERVICE_ADB_COMMAND = "adb_command"
-SERVICE_DOWNLOAD = "download"
-SERVICE_LEARN_SENDEVENT = "learn_sendevent"
-SERVICE_UPLOAD = "upload"
-
 # Translate from `AndroidTV` / `FireTV` reported state to HA state.
 ANDROIDTV_STATES = {
     "off": MediaPlayerState.OFF,
@@ -77,32 +65,6 @@ async def async_setup_entry(
         ]
     )

-    platform = entity_platform.async_get_current_platform()
-    platform.async_register_entity_service(
-        SERVICE_ADB_COMMAND,
-        {vol.Required(ATTR_COMMAND): cv.string},
-        "adb_command",
-    )
-    platform.async_register_entity_service(
-        SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
-    )
-    platform.async_register_entity_service(
-        SERVICE_DOWNLOAD,
-        {
-            vol.Required(ATTR_DEVICE_PATH): cv.string,
-            vol.Required(ATTR_LOCAL_PATH): cv.string,
-        },
-        "service_download",
-    )
-    platform.async_register_entity_service(
-        SERVICE_UPLOAD,
-        {
-            vol.Required(ATTR_DEVICE_PATH): cv.string,
-            vol.Required(ATTR_LOCAL_PATH): cv.string,
-        },
-        "service_upload",
-    )
-

 class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
     """Representation of an Android or Fire TV device."""
homeassistant/components/androidtv/services.py (new file, 66 lines)
@@ -0,0 +1,66 @@
+"""Services for Android/Fire TV devices."""
+
+from __future__ import annotations
+
+import voluptuous as vol
+
+from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
+from homeassistant.const import ATTR_COMMAND
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import config_validation as cv, service
+
+from .const import DOMAIN
+
+ATTR_ADB_RESPONSE = "adb_response"
+ATTR_DEVICE_PATH = "device_path"
+ATTR_HDMI_INPUT = "hdmi_input"
+ATTR_LOCAL_PATH = "local_path"
+
+SERVICE_ADB_COMMAND = "adb_command"
+SERVICE_DOWNLOAD = "download"
+SERVICE_LEARN_SENDEVENT = "learn_sendevent"
+SERVICE_UPLOAD = "upload"
+
+
+@callback
+def async_setup_services(hass: HomeAssistant) -> None:
+    """Register the Android TV / Fire TV services."""
+
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_ADB_COMMAND,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema={vol.Required(ATTR_COMMAND): cv.string},
+        func="adb_command",
+    )
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_LEARN_SENDEVENT,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema=None,
+        func="learn_sendevent",
+    )
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_DOWNLOAD,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema={
+            vol.Required(ATTR_DEVICE_PATH): cv.string,
+            vol.Required(ATTR_LOCAL_PATH): cv.string,
+        },
+        func="service_download",
+    )
+    service.async_register_platform_entity_service(
+        hass,
+        DOMAIN,
+        SERVICE_UPLOAD,
+        entity_domain=MEDIA_PLAYER_DOMAIN,
+        schema={
+            vol.Required(ATTR_DEVICE_PATH): cv.string,
+            vol.Required(ATTR_LOCAL_PATH): cv.string,
+        },
+        func="service_upload",
+    )
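Moving registration from the media_player platform into the integration's async_setup means the services exist as soon as the integration loads, before any entity is set up, and callers are unchanged. A sketch of invoking the relocated service from Python (the entity id is illustrative):

from homeassistant.core import HomeAssistant


async def send_home_key(hass: HomeAssistant) -> None:
    """Call androidtv.adb_command against one media_player entity."""
    await hass.services.async_call(
        "androidtv",
        "adb_command",
        {"entity_id": "media_player.fire_tv", "command": "HOME"},
        blocking=True,
    )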
@@ -16,7 +16,7 @@ from .coordinator import (
     AOSmithStatusCoordinator,
 )

-PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER]
+PLATFORMS: list[Platform] = [Platform.SELECT, Platform.SENSOR, Platform.WATER_HEATER]


 async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool:
@@ -1,5 +1,10 @@
 {
   "entity": {
+    "select": {
+      "hot_water_plus_level": {
+        "default": "mdi:water-plus"
+      }
+    },
     "sensor": {
       "hot_water_availability": {
         "default": "mdi:water-thermometer"
homeassistant/components/aosmith/select.py (new file, 70 lines)
@@ -0,0 +1,70 @@
+"""The select platform for the A. O. Smith integration."""
+
+from homeassistant.components.select import SelectEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import AOSmithConfigEntry
+from .coordinator import AOSmithStatusCoordinator
+from .entity import AOSmithStatusEntity
+
+HWP_LEVEL_HA_TO_AOSMITH = {
+    "off": 0,
+    "level1": 1,
+    "level2": 2,
+    "level3": 3,
+}
+HWP_LEVEL_AOSMITH_TO_HA = {value: key for key, value in HWP_LEVEL_HA_TO_AOSMITH.items()}
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AOSmithConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up A. O. Smith select platform."""
+    data = entry.runtime_data
+
+    async_add_entities(
+        AOSmithHotWaterPlusSelectEntity(data.status_coordinator, device.junction_id)
+        for device in data.status_coordinator.data.values()
+        if device.supports_hot_water_plus
+    )
+
+
+class AOSmithHotWaterPlusSelectEntity(AOSmithStatusEntity, SelectEntity):
+    """Class for the Hot Water+ select entity."""
+
+    _attr_translation_key = "hot_water_plus_level"
+    _attr_options = list(HWP_LEVEL_HA_TO_AOSMITH)
+
+    def __init__(self, coordinator: AOSmithStatusCoordinator, junction_id: str) -> None:
+        """Initialize the entity."""
+        super().__init__(coordinator, junction_id)
+        self._attr_unique_id = f"hot_water_plus_level_{junction_id}"
+
+    @property
+    def suggested_object_id(self) -> str | None:
+        """Override the suggested object id to make '+' get converted to 'plus' in the entity id."""
+        return "hot_water_plus_level"
+
+    @property
+    def current_option(self) -> str | None:
+        """Return the current Hot Water+ mode."""
+        hot_water_plus_level = self.device.status.hot_water_plus_level
+        return (
+            None
+            if hot_water_plus_level is None
+            else HWP_LEVEL_AOSMITH_TO_HA.get(hot_water_plus_level)
+        )
+
+    async def async_select_option(self, option: str) -> None:
+        """Set the Hot Water+ mode."""
+        aosmith_hwp_level = HWP_LEVEL_HA_TO_AOSMITH[option]
+        await self.client.update_mode(
+            junction_id=self.junction_id,
+            mode=self.device.status.current_mode,
+            hot_water_plus_level=aosmith_hwp_level,
+        )
+
+        await self.coordinator.async_request_refresh()
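Note: the two module-level dictionaries form a bijective mapping between Home Assistant option strings and the integer levels the A. O. Smith API expects. A standalone sanity check of the round trip:

HWP_LEVEL_HA_TO_AOSMITH = {"off": 0, "level1": 1, "level2": 2, "level3": 3}
HWP_LEVEL_AOSMITH_TO_HA = {value: key for key, value in HWP_LEVEL_HA_TO_AOSMITH.items()}

# Every option survives the HA -> A. O. Smith -> HA round trip.
assert all(
    HWP_LEVEL_AOSMITH_TO_HA[HWP_LEVEL_HA_TO_AOSMITH[option]] == option
    for option in HWP_LEVEL_HA_TO_AOSMITH
)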
homeassistant/components/aosmith/strings.json
@@ -26,6 +26,17 @@
       }
     },
     "entity": {
+      "select": {
+        "hot_water_plus_level": {
+          "name": "Hot Water+ level",
+          "state": {
+            "off": "[%key:common::state::off%]",
+            "level1": "Level 1",
+            "level2": "Level 2",
+            "level3": "Level 3"
+          }
+        }
+      },
       "sensor": {
         "hot_water_availability": {
           "name": "Hot water availability"
homeassistant/components/apcupsd/manifest.json
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["apcaccess"],
   "quality_scale": "platinum",
-  "requirements": ["aioapcaccess==0.4.2"]
+  "requirements": ["aioapcaccess==1.0.0"]
 }
homeassistant/components/apcupsd/sensor.py
@@ -395,6 +395,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
     "upsmode": SensorEntityDescription(
         key="upsmode",
         translation_key="ups_mode",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "upsname": SensorEntityDescription(
         key="upsname",
@@ -466,7 +467,10 @@ async def async_setup_entry(
     # periodical (or manual) self test since last daemon restart. It might not be available
     # when we set up the integration, and we do not know if it would ever be available. Here we
     # add it anyway and mark it as unknown initially.
-    for resource in available_resources | {LAST_S_TEST}:
+    #
+    # We also sort the resources to ensure the order of entities created is deterministic since
+    # "APCMODEL" and "MODEL" resources map to the same "Model" name.
+    for resource in sorted(available_resources | {LAST_S_TEST}):
         if resource not in SENSORS:
             _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
             continue
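Note: sorting matters here because `available_resources` is a set, whose iteration order varies between runs, while "APCMODEL" and "MODEL" both map to the sensor name "Model". A standalone sketch of the effect:

# Set iteration order is not guaranteed, so without sorted() the entity
# creation order (and which duplicate-named sensor wins) could change
# between restarts.
available_resources = {"MODEL", "APCMODEL", "UPSNAME"}
LAST_S_TEST = "LASTSTEST"
print(sorted(available_resources | {LAST_S_TEST}))
# ['APCMODEL', 'LASTSTEST', 'MODEL', 'UPSNAME']  -- always the same order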
homeassistant/components/assist_pipeline/__init__.py
@@ -103,6 +103,7 @@ async def async_pipeline_from_audio_stream(
     wake_word_settings: WakeWordSettings | None = None,
     audio_settings: AudioSettings | None = None,
     device_id: str | None = None,
+    satellite_id: str | None = None,
     start_stage: PipelineStage = PipelineStage.STT,
     end_stage: PipelineStage = PipelineStage.TTS,
     conversation_extra_system_prompt: str | None = None,
@@ -115,6 +116,7 @@ async def async_pipeline_from_audio_stream(
     pipeline_input = PipelineInput(
         session=session,
         device_id=device_id,
+        satellite_id=satellite_id,
         stt_metadata=stt_metadata,
         stt_stream=stt_stream,
         wake_word_phrase=wake_word_phrase,
homeassistant/components/assist_pipeline/acknowledge.mp3 (new binary file)
Binary file not shown.
homeassistant/components/assist_pipeline/const.py
@@ -1,5 +1,7 @@
 """Constants for the Assist pipeline integration."""
 
+from pathlib import Path
+
 DOMAIN = "assist_pipeline"
 
 DATA_CONFIG = f"{DOMAIN}.config"
@@ -23,3 +25,5 @@ SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)  # 10 ms @ 16Khz
 BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS  # 16-bit
 
 OPTION_PREFERRED = "preferred"
+
+ACKNOWLEDGE_PATH = Path(__file__).parent / "acknowledge.mp3"
homeassistant/components/assist_pipeline/pipeline.py
@@ -23,7 +23,12 @@ from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
 from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
 from homeassistant.core import Context, HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import chat_session, intent
+from homeassistant.helpers import (
+    chat_session,
+    device_registry as dr,
+    entity_registry as er,
+    intent,
+)
 from homeassistant.helpers.collection import (
     CHANGE_UPDATED,
     CollectionError,
@@ -45,6 +50,7 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict
 
 from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
 from .const import (
+    ACKNOWLEDGE_PATH,
     BYTES_PER_CHUNK,
     CONF_DEBUG_RECORDING_DIR,
     DATA_CONFIG,
@@ -113,6 +119,7 @@ PIPELINE_FIELDS: VolDictType = {
     vol.Required("wake_word_entity"): vol.Any(str, None),
     vol.Required("wake_word_id"): vol.Any(str, None),
     vol.Optional("prefer_local_intents"): bool,
+    vol.Optional("acknowledge_media_id"): str,
 }
 
 STORED_PIPELINE_RUNS = 10
@@ -583,6 +590,9 @@ class PipelineRun:
     _device_id: str | None = None
     """Optional device id set during run start."""
 
+    _satellite_id: str | None = None
+    """Optional satellite id set during run start."""
+
     _conversation_data: PipelineConversationData | None = None
     """Data tied to the conversation ID."""
 
@@ -636,9 +646,12 @@ class PipelineRun:
             return
         pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)
 
-    def start(self, conversation_id: str, device_id: str | None) -> None:
+    def start(
+        self, conversation_id: str, device_id: str | None, satellite_id: str | None
+    ) -> None:
         """Emit run start event."""
         self._device_id = device_id
+        self._satellite_id = satellite_id
         self._start_debug_recording_thread()
 
         data: dict[str, Any] = {
@@ -646,6 +659,8 @@ class PipelineRun:
             "language": self.language,
             "conversation_id": conversation_id,
         }
+        if satellite_id is not None:
+            data["satellite_id"] = satellite_id
         if self.runner_data is not None:
             data["runner_data"] = self.runner_data
         if self.tts_stream:
@@ -1057,10 +1072,12 @@ class PipelineRun:
         self,
         intent_input: str,
         conversation_id: str,
-        device_id: str | None,
         conversation_extra_system_prompt: str | None,
-    ) -> str:
-        """Run intent recognition portion of pipeline. Returns text to speak."""
+    ) -> tuple[str, bool]:
+        """Run intent recognition portion of pipeline.
+
+        Returns (speech, all_targets_in_satellite_area).
+        """
         if self.intent_agent is None or self._conversation_data is None:
             raise RuntimeError("Recognize intent was not prepared")
 
@@ -1088,7 +1105,8 @@ class PipelineRun:
                 "language": input_language,
                 "intent_input": intent_input,
                 "conversation_id": conversation_id,
-                "device_id": device_id,
+                "device_id": self._device_id,
+                "satellite_id": self._satellite_id,
                 "prefer_local_intents": self.pipeline.prefer_local_intents,
             },
         )
@@ -1099,7 +1117,8 @@ class PipelineRun:
             text=intent_input,
             context=self.context,
             conversation_id=conversation_id,
-            device_id=device_id,
+            device_id=self._device_id,
+            satellite_id=self._satellite_id,
             language=input_language,
             agent_id=self.intent_agent.id,
             extra_system_prompt=conversation_extra_system_prompt,
@@ -1107,6 +1126,7 @@ class PipelineRun:
 
         agent_id = self.intent_agent.id
         processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
+        all_targets_in_satellite_area = False
        intent_response: intent.IntentResponse | None = None
         if not processed_locally and not self._intent_agent_only:
             # Sentence triggers override conversation agent
@@ -1269,6 +1289,7 @@ class PipelineRun:
                     text=user_input.text,
                     conversation_id=user_input.conversation_id,
                     device_id=user_input.device_id,
+                    satellite_id=user_input.satellite_id,
                     context=user_input.context,
                     language=user_input.language,
                     agent_id=user_input.agent_id,
@@ -1280,6 +1301,17 @@ class PipelineRun:
                 if tts_input_stream and self._streamed_response_text:
                     tts_input_stream.put_nowait(None)
 
+            if agent_id == conversation.HOME_ASSISTANT_AGENT:
+                # Check if all targeted entities were in the same area as
+                # the satellite device.
+                # If so, the satellite should respond with an acknowledge beep
+                # instead of a full response.
+                all_targets_in_satellite_area = (
+                    self._get_all_targets_in_satellite_area(
+                        conversation_result.response, self._device_id
+                    )
+                )
+
         except Exception as src_error:
             _LOGGER.exception("Unexpected error during intent recognition")
             raise IntentRecognitionError(
@@ -1302,7 +1334,45 @@ class PipelineRun:
         if conversation_result.continue_conversation:
             self._conversation_data.continue_conversation_agent = agent_id
 
-        return speech
+        return (speech, all_targets_in_satellite_area)
+
+    def _get_all_targets_in_satellite_area(
+        self, intent_response: intent.IntentResponse, device_id: str | None
+    ) -> bool:
+        """Return true if all targeted entities were in the same area as the device."""
+        if (
+            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
+            or (not intent_response.matched_states)
+            or (not device_id)
+        ):
+            return False
+
+        device_registry = dr.async_get(self.hass)
+
+        if (not (device := device_registry.async_get(device_id))) or (
+            not device.area_id
+        ):
+            return False
+
+        entity_registry = er.async_get(self.hass)
+        for state in intent_response.matched_states:
+            entity = entity_registry.async_get(state.entity_id)
+            if not entity:
+                return False
+
+            if (entity_area_id := entity.area_id) is None:
+                if (entity.device_id is None) or (
+                    (entity_device := device_registry.async_get(entity.device_id))
+                    is None
+                ):
+                    return False
+
+                entity_area_id = entity_device.area_id
+
+            if entity_area_id != device.area_id:
+                return False
+
+        return True
 
     async def prepare_text_to_speech(self) -> None:
         """Prepare text-to-speech."""
@@ -1340,7 +1410,9 @@ class PipelineRun:
             ),
         ) from err
 
-    async def text_to_speech(self, tts_input: str) -> None:
+    async def text_to_speech(
+        self, tts_input: str, override_media_path: Path | None = None
+    ) -> None:
         """Run text-to-speech portion of pipeline."""
         assert self.tts_stream is not None
 
@@ -1352,11 +1424,14 @@ class PipelineRun:
                     "language": self.pipeline.tts_language,
                     "voice": self.pipeline.tts_voice,
                     "tts_input": tts_input,
+                    "acknowledge_override": override_media_path is not None,
                 },
             )
         )
 
-        if not self._streamed_response_text:
+        if override_media_path:
+            self.tts_stream.async_override_result(override_media_path)
+        elif not self._streamed_response_text:
             self.tts_stream.async_set_message(tts_input)
 
         tts_output = {
@@ -1567,10 +1642,15 @@ class PipelineInput:
     device_id: str | None = None
     """Identifier of the device that is processing the input/output of the pipeline."""
 
+    satellite_id: str | None = None
+    """Identifier of the satellite that is processing the input/output of the pipeline."""
+
     async def execute(self) -> None:
         """Run pipeline."""
         self.run.start(
-            conversation_id=self.session.conversation_id, device_id=self.device_id
+            conversation_id=self.session.conversation_id,
+            device_id=self.device_id,
+            satellite_id=self.satellite_id,
         )
         current_stage: PipelineStage | None = self.run.start_stage
         stt_audio_buffer: list[EnhancedAudioChunk] = []
@@ -1649,17 +1729,20 @@ class PipelineInput:
 
             if self.run.end_stage != PipelineStage.STT:
                 tts_input = self.tts_input
+                all_targets_in_satellite_area = False
 
                 if current_stage == PipelineStage.INTENT:
                     # intent-recognition
                     assert intent_input is not None
-                    tts_input = await self.run.recognize_intent(
+                    (
+                        tts_input,
+                        all_targets_in_satellite_area,
+                    ) = await self.run.recognize_intent(
                         intent_input,
                         self.session.conversation_id,
-                        self.device_id,
                         self.conversation_extra_system_prompt,
                     )
-                    if tts_input.strip():
+                    if all_targets_in_satellite_area or tts_input.strip():
                         current_stage = PipelineStage.TTS
                     else:
                         # Skip TTS
@@ -1668,8 +1751,14 @@ class PipelineInput:
             if self.run.end_stage != PipelineStage.INTENT:
                 # text-to-speech
                 if current_stage == PipelineStage.TTS:
-                    assert tts_input is not None
-                    await self.run.text_to_speech(tts_input)
+                    if all_targets_in_satellite_area:
+                        # Use acknowledge media instead of full response
+                        await self.run.text_to_speech(
+                            tts_input or "", override_media_path=ACKNOWLEDGE_PATH
+                        )
+                    else:
+                        assert tts_input is not None
+                        await self.run.text_to_speech(tts_input)
 
         except PipelineError as err:
             self.run.process_event(
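Note: taken together, these hunks thread a `satellite_id` from the pipeline entry point down to intent recognition, and short-circuit TTS to a canned beep when every targeted entity shares the satellite's area. A condensed sketch of the new control flow, with the surrounding plumbing elided; `run` stands for a PipelineRun and ACKNOWLEDGE_PATH comes from assist_pipeline.const:

# Condensed sketch of the acknowledge decision added above (not the full method).
async def speak_or_acknowledge(run, intent_input, conversation_id, extra_prompt):
    speech, all_targets_in_satellite_area = await run.recognize_intent(
        intent_input, conversation_id, extra_prompt
    )
    if all_targets_in_satellite_area:
        # Everything the command touched is in the satellite's own area:
        # play the short acknowledge sound instead of a spoken response.
        await run.text_to_speech(speech or "", override_media_path=ACKNOWLEDGE_PATH)
    elif speech.strip():
        await run.text_to_speech(speech)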
homeassistant/components/assist_pipeline/select.py
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 from collections.abc import Iterable
+from dataclasses import replace
 
 from homeassistant.components.select import SelectEntity, SelectEntityDescription
 from homeassistant.const import EntityCategory, Platform
@@ -64,15 +65,36 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
         translation_key="pipeline",
         entity_category=EntityCategory.CONFIG,
     )
 
     _attr_should_poll = False
     _attr_current_option = OPTION_PREFERRED
     _attr_options = [OPTION_PREFERRED]
 
-    def __init__(self, hass: HomeAssistant, domain: str, unique_id_prefix: str) -> None:
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        domain: str,
+        unique_id_prefix: str,
+        index: int = 0,
+    ) -> None:
         """Initialize a pipeline selector."""
+        if index < 1:
+            # Keep compatibility
+            key_suffix = ""
+            placeholder = ""
+        else:
+            key_suffix = f"_{index + 1}"
+            placeholder = f" {index + 1}"
+
+        self.entity_description = replace(
+            self.entity_description,
+            key=f"pipeline{key_suffix}",
+            translation_placeholders={"index": placeholder},
+        )
+
         self._domain = domain
         self._unique_id_prefix = unique_id_prefix
-        self._attr_unique_id = f"{unique_id_prefix}-pipeline"
+        self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"
         self.hass = hass
         self._update_options()
 
@@ -87,7 +109,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
         )
 
         state = await self.async_get_last_state()
-        if state is not None and state.state in self.options:
+        if (state is not None) and (state.state in self.options):
             self._attr_current_option = state.state
 
         if self.registry_entry and (device_id := self.registry_entry.device_id):
@@ -97,7 +119,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
 
         def cleanup() -> None:
             """Clean up registered device."""
-            pipeline_data.pipeline_devices.pop(device_id)
+            pipeline_data.pipeline_devices.pop(device_id, None)
 
         self.async_on_remove(cleanup)
 
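Note: the `index` parameter only changes the suffix used for the entity description key, the unique ID, and the translated name; index 0 keeps the legacy identifiers. A standalone sketch of the suffix rule:

# Sketch of the index-to-suffix rule used in __init__ above.
def pipeline_key(index: int) -> str:
    return "pipeline" if index < 1 else f"pipeline_{index + 1}"

assert pipeline_key(0) == "pipeline"    # legacy unique IDs stay stable
assert pipeline_key(1) == "pipeline_2"  # second selector on the same device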
homeassistant/components/assist_pipeline/strings.json
@@ -7,7 +7,7 @@
     },
     "select": {
       "pipeline": {
-        "name": "Assistant",
+        "name": "Assistant{index}",
         "state": {
           "preferred": "Preferred"
         }
homeassistant/components/assist_satellite/entity.py
@@ -522,6 +522,7 @@ class AssistSatelliteEntity(entity.Entity):
             pipeline_id=self._resolve_pipeline(),
             conversation_id=session.conversation_id,
             device_id=device_id,
+            satellite_id=self.entity_id,
             tts_audio_output=self.tts_options,
             wake_word_phrase=wake_word_phrase,
             audio_settings=AudioSettings(
homeassistant/components/asuswrt/bridge.py
@@ -120,6 +120,7 @@ class AsusWrtBridge(ABC):
 
     def __init__(self, host: str) -> None:
         """Initialize Bridge."""
+        self._configuration_url = f"http://{host}"
         self._host = host
         self._firmware: str | None = None
         self._label_mac: str | None = None
@@ -127,6 +128,11 @@ class AsusWrtBridge(ABC):
         self._model_id: str | None = None
         self._serial_number: str | None = None
 
+    @property
+    def configuration_url(self) -> str:
+        """Return configuration URL."""
+        return self._configuration_url
+
     @property
     def host(self) -> str:
         """Return hostname."""
@@ -371,6 +377,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):
         # get main router properties
         if mac := _identity.mac:
             self._label_mac = format_mac(mac)
+        self._configuration_url = self._api.webpanel
         self._firmware = str(_identity.firmware)
         self._model = _identity.model
         self._model_id = _identity.product_id
homeassistant/components/asuswrt/router.py
@@ -388,13 +388,13 @@ class AsusWrtRouter:
     def device_info(self) -> DeviceInfo:
         """Return the device information."""
         info = DeviceInfo(
+            configuration_url=self._api.configuration_url,
             identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
             name=self.host,
             model=self._api.model or "Asus Router",
             model_id=self._api.model_id,
             serial_number=self._api.serial_number,
             manufacturer="Asus",
-            configuration_url=f"http://{self.host}",
         )
         if self._api.firmware:
             info["sw_version"] = self._api.firmware
homeassistant/components/august/lock.py
@@ -2,13 +2,12 @@
 
 from __future__ import annotations
 
-from collections.abc import Callable, Coroutine
 import logging
 from typing import Any
 
 from aiohttp import ClientResponseError
-from yalexs.activity import ActivityType, ActivityTypes
-from yalexs.lock import Lock, LockStatus
+from yalexs.activity import ActivityType
+from yalexs.lock import Lock, LockOperation, LockStatus
 from yalexs.util import get_latest_activity, update_lock_detail_from_activity
 
 from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature
@@ -50,30 +49,25 @@ class AugustLock(AugustEntity, RestoreEntity, LockEntity):
 
     async def async_lock(self, **kwargs: Any) -> None:
         """Lock the device."""
-        if self._data.push_updates_connected:
-            await self._data.async_lock_async(self._device_id, self._hyper_bridge)
-            return
-        await self._call_lock_operation(self._data.async_lock)
+        await self._perform_lock_operation(LockOperation.LOCK)
 
     async def async_open(self, **kwargs: Any) -> None:
         """Open/unlatch the device."""
-        if self._data.push_updates_connected:
-            await self._data.async_unlatch_async(self._device_id, self._hyper_bridge)
-            return
-        await self._call_lock_operation(self._data.async_unlatch)
+        await self._perform_lock_operation(LockOperation.OPEN)
 
     async def async_unlock(self, **kwargs: Any) -> None:
         """Unlock the device."""
-        if self._data.push_updates_connected:
-            await self._data.async_unlock_async(self._device_id, self._hyper_bridge)
-            return
-        await self._call_lock_operation(self._data.async_unlock)
+        await self._perform_lock_operation(LockOperation.UNLOCK)
 
-    async def _call_lock_operation(
-        self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]]
-    ) -> None:
+    async def _perform_lock_operation(self, operation: LockOperation) -> None:
+        """Perform a lock operation."""
         try:
-            activities = await lock_operation(self._device_id)
+            activities = await self._data.async_operate_lock(
+                self._device_id,
+                operation,
+                self._data.push_updates_connected,
+                self._hyper_bridge,
+            )
         except ClientResponseError as err:
             if err.status == LOCK_JAMMED_ERR:
                 self._detail.lock_status = LockStatus.JAMMED
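Note: the three near-identical lock/unlock/open paths collapse into one helper that dispatches on `LockOperation` and lets the data layer pick the push or polling variant. A toy model of the dispatch; the stand-in enum below is hypothetical, the real one comes from yalexs.lock:

# Toy model of the consolidated dispatch (stand-in for yalexs.lock.LockOperation).
from enum import Enum, auto

class LockOperation(Enum):
    LOCK = auto()
    UNLOCK = auto()
    OPEN = auto()

async def async_lock(entity) -> None:
    """Each public method is now a one-line delegation to the shared helper."""
    await entity._perform_lock_operation(LockOperation.LOCK)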
homeassistant/components/august/manifest.json
@@ -29,5 +29,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"]
+  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
 }
homeassistant/components/auth/__init__.py
@@ -92,7 +92,11 @@ from homeassistant.components.http.ban import (
 from homeassistant.components.http.data_validator import RequestDataValidator
 from homeassistant.components.http.view import HomeAssistantView
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.network import is_cloud_connection
+from homeassistant.helpers.network import (
+    NoURLAvailableError,
+    get_url,
+    is_cloud_connection,
+)
 from homeassistant.util.network import is_local
 
 from . import indieauth
@@ -125,11 +129,18 @@ class WellKnownOAuthInfoView(HomeAssistantView):
 
     async def get(self, request: web.Request) -> web.Response:
         """Return the well known OAuth2 authorization info."""
+        hass = request.app[KEY_HASS]
+        # Some applications require absolute urls, so we prefer using the
+        # current requests url if possible, with fallback to a relative url.
+        try:
+            url_prefix = get_url(hass, require_current_request=True)
+        except NoURLAvailableError:
+            url_prefix = ""
         return self.json(
             {
-                "authorization_endpoint": "/auth/authorize",
-                "token_endpoint": "/auth/token",
-                "revocation_endpoint": "/auth/revoke",
+                "authorization_endpoint": f"{url_prefix}/auth/authorize",
+                "token_endpoint": f"{url_prefix}/auth/token",
+                "revocation_endpoint": f"{url_prefix}/auth/revoke",
                 "response_types_supported": ["code"],
                 "service_documentation": (
                     "https://developers.home-assistant.io/docs/auth_api"
|
|||||||
from typing import IO, cast
|
from typing import IO, cast
|
||||||
|
|
||||||
from aiohttp import BodyPartReader
|
from aiohttp import BodyPartReader
|
||||||
from aiohttp.hdrs import CONTENT_DISPOSITION
|
from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE
|
||||||
from aiohttp.web import FileResponse, Request, Response, StreamResponse
|
from aiohttp.web import FileResponse, Request, Response, StreamResponse
|
||||||
from multidict import istr
|
from multidict import istr
|
||||||
|
|
||||||
@@ -76,7 +76,8 @@ class DownloadBackupView(HomeAssistantView):
|
|||||||
return Response(status=HTTPStatus.NOT_FOUND)
|
return Response(status=HTTPStatus.NOT_FOUND)
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
|
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar",
|
||||||
|
CONTENT_TYPE: "application/x-tar",
|
||||||
}
|
}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@@ -14,15 +14,15 @@
|
|||||||
},
|
},
|
||||||
"automatic_backup_failed_addons": {
|
"automatic_backup_failed_addons": {
|
||||||
"title": "Not all add-ons could be included in automatic backup",
|
"title": "Not all add-ons could be included in automatic backup",
|
||||||
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||||
},
|
},
|
||||||
"automatic_backup_failed_agents_addons_folders": {
|
"automatic_backup_failed_agents_addons_folders": {
|
||||||
"title": "Automatic backup was created with errors",
|
"title": "Automatic backup was created with errors",
|
||||||
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the core and supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||||
},
|
},
|
||||||
"automatic_backup_failed_folders": {
|
"automatic_backup_failed_folders": {
|
||||||
"title": "Not all folders could be included in automatic backup",
|
"title": "Not all folders could be included in automatic backup",
|
||||||
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"services": {
|
"services": {
|
||||||
|
homeassistant/components/bayesian/binary_sensor.py
@@ -497,16 +497,18 @@ class BayesianBinarySensor(BinarySensorEntity):
                 _LOGGER.debug(
                     (
                         "Observation for entity '%s' returned None, it will not be used"
-                        " for Bayesian updating"
+                        " for updating Bayesian sensor '%s'"
                     ),
                     observation.entity_id,
+                    self.entity_id,
                 )
                 continue
             _LOGGER.debug(
                 (
                     "Observation for template entity returned None rather than a valid"
-                    " boolean, it will not be used for Bayesian updating"
+                    " boolean, it will not be used for updating Bayesian sensor '%s'"
                 ),
+                self.entity_id,
             )
         # the prior has been updated and is now the posterior
         return prior
homeassistant/components/bluetooth/manager.py
@@ -8,8 +8,19 @@ import itertools
 import logging
 
 from bleak_retry_connector import BleakSlotManager
-from bluetooth_adapters import BluetoothAdapters, adapter_human_name, adapter_model
-from habluetooth import BaseHaRemoteScanner, BaseHaScanner, BluetoothManager, HaScanner
+from bluetooth_adapters import (
+    ADAPTER_TYPE,
+    BluetoothAdapters,
+    adapter_human_name,
+    adapter_model,
+)
+from habluetooth import (
+    BaseHaRemoteScanner,
+    BaseHaScanner,
+    BluetoothManager,
+    BluetoothScanningMode,
+    HaScanner,
+)
 
 from homeassistant import config_entries
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP, EVENT_LOGGING_CHANGED
@@ -326,7 +337,53 @@ class HomeAssistantBluetoothManager(BluetoothManager):
         # Only handle repair issues for local adapters (HaScanner instances)
         if not isinstance(scanner, HaScanner):
             return
+        self.async_check_degraded_mode(scanner)
+        self.async_check_scanning_mode(scanner)
+
+    @hass_callback
+    def async_check_scanning_mode(self, scanner: HaScanner) -> None:
+        """Check if the scanner is running in passive mode when active mode is requested."""
+        passive_mode_issue_id = f"bluetooth_adapter_passive_mode_{scanner.source}"
+
+        # Check if scanner is NOT in passive mode when active mode was requested
+        if not (
+            scanner.requested_mode is BluetoothScanningMode.ACTIVE
+            and scanner.current_mode is BluetoothScanningMode.PASSIVE
+        ):
+            # Delete passive mode issue if it exists and we're not in passive fallback
+            ir.async_delete_issue(self.hass, DOMAIN, passive_mode_issue_id)
+            return
+
+        # Create repair issue for passive mode fallback
+        adapter_name = adapter_human_name(
+            scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
+        )
+        adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
+        model = adapter_model(adapter_details) if adapter_details else None
+
+        # Determine adapter type for specific instructions
+        # Default to USB for any other type or unknown
+        if adapter_details and adapter_details.get(ADAPTER_TYPE) == "uart":
+            translation_key = "bluetooth_adapter_passive_mode_uart"
+        else:
+            translation_key = "bluetooth_adapter_passive_mode_usb"
+
+        ir.async_create_issue(
+            self.hass,
+            DOMAIN,
+            passive_mode_issue_id,
+            is_fixable=False,  # Requires a reboot or unplug
+            severity=ir.IssueSeverity.WARNING,
+            translation_key=translation_key,
+            translation_placeholders={
+                "adapter": adapter_name,
+                "model": model or "Unknown",
+            },
+        )
+
+    @hass_callback
+    def async_check_degraded_mode(self, scanner: HaScanner) -> None:
+        """Check if we are in degraded mode and create/delete repair issues."""
         issue_id = f"bluetooth_adapter_missing_permissions_{scanner.source}"
 
         # Delete any existing issue if not in degraded mode
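Note: the repair issue hinges on one condition: active scanning was requested but the adapter is actually running passive. A standalone sketch of that predicate:

# Sketch of the fallback check used in async_check_scanning_mode above.
def stuck_in_passive(requested_mode: str, current_mode: str) -> bool:
    """True when active scanning was requested but passive is running."""
    return requested_mode == "active" and current_mode == "passive"

assert stuck_in_passive("active", "passive")
assert not stuck_in_passive("active", "active")
assert not stuck_in_passive("passive", "passive")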
homeassistant/components/bluetooth/manifest.json
@@ -18,9 +18,9 @@
     "bleak==1.0.1",
     "bleak-retry-connector==4.4.3",
     "bluetooth-adapters==2.1.0",
-    "bluetooth-auto-recovery==1.5.2",
+    "bluetooth-auto-recovery==1.5.3",
     "bluetooth-data-tools==1.28.2",
     "dbus-fast==2.44.3",
-    "habluetooth==5.6.2"
+    "habluetooth==5.6.4"
   ]
 }
homeassistant/components/bluetooth/strings.json
@@ -43,6 +43,14 @@
     "bluetooth_adapter_missing_permissions": {
       "title": "Bluetooth adapter requires additional permissions",
       "description": "The Bluetooth adapter **{adapter}** ({model}) is operating in degraded mode because your container needs additional permissions to fully access Bluetooth hardware.\n\nPlease follow the instructions in our documentation to add the required permissions:\n[Bluetooth permissions for Docker]({docs_url})\n\nAfter adding these permissions, restart your Home Assistant container for the changes to take effect."
+    },
+    "bluetooth_adapter_passive_mode_usb": {
+      "title": "Bluetooth USB adapter requires manual power cycle",
+      "description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the adapter requires a manual power cycle to recover.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Unplug the USB adapter**\n2. Wait 5 seconds\n3. **Plug it back in**\n4. Wait for Home Assistant to detect the adapter\n\nIf the issue persists after power cycling:\n- Try a different USB port\n- Check for kernel/firmware updates\n- Consider using a different Bluetooth adapter"
+    },
+    "bluetooth_adapter_passive_mode_uart": {
+      "title": "Bluetooth adapter requires system power cycle",
+      "description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the system requires a complete power cycle to recover the adapter.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Shut down the system completely** (not just a reboot)\n2. **Remove power** (unplug or turn off at the switch)\n3. Wait 10 seconds\n4. Restore power and boot the system\n\nIf the issue persists after power cycling:\n- Check for kernel/firmware updates\n- The onboard Bluetooth adapter may have hardware issues"
     }
   }
 }
@@ -18,8 +18,10 @@ async def async_get_config_entry_diagnostics(
     coordinator = config_entry.runtime_data
 
     device_info = await coordinator.client.get_system_info()
+    command_list = await coordinator.client.get_command_list()
 
     return {
+        "remote_command_list": command_list,
         "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
         "device_info": async_redact_data(device_info, TO_REDACT),
     }
homeassistant/components/bring/coordinator.py
@@ -205,6 +205,7 @@ class BringActivityCoordinator(BringBaseCoordinator[dict[str, BringActivityData]]):
 
     async def _async_update_data(self) -> dict[str, BringActivityData]:
         """Fetch activity data from bring."""
+        self.lists = self.coordinator.lists
 
         list_dict: dict[str, BringActivityData] = {}
         for lst in self.lists:
homeassistant/components/bring/event.py
@@ -43,7 +43,7 @@ async def async_setup_entry(
         )
         lists_added |= new_lists
 
-    coordinator.activity.async_add_listener(add_entities)
+    coordinator.data.async_add_listener(add_entities)
     add_entities()
 
 
@@ -67,7 +67,8 @@ class BringEventEntity(BringBaseEntity, EventEntity):
 
     def _async_handle_event(self) -> None:
         """Handle the activity event."""
-        bring_list = self.coordinator.data[self._list_uuid]
+        if (bring_list := self.coordinator.data.get(self._list_uuid)) is None:
+            return
         last_event_triggered = self.state
         if bring_list.activity.timeline and (
             last_event_triggered is None
homeassistant/components/brother/__init__.py
@@ -2,28 +2,40 @@
 
 from __future__ import annotations
 
+import logging
+
 from brother import Brother, SnmpError
 
 from homeassistant.components.snmp import async_get_snmp_engine
-from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
+from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 
-from .const import DOMAIN
+from .const import (
+    CONF_COMMUNITY,
+    DEFAULT_COMMUNITY,
+    DEFAULT_PORT,
+    DOMAIN,
+    SECTION_ADVANCED_SETTINGS,
+)
 from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
 
+_LOGGER = logging.getLogger(__name__)
+
 PLATFORMS = [Platform.SENSOR]
 
 
 async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
     """Set up Brother from a config entry."""
     host = entry.data[CONF_HOST]
+    port = entry.data[SECTION_ADVANCED_SETTINGS][CONF_PORT]
+    community = entry.data[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY]
     printer_type = entry.data[CONF_TYPE]
 
     snmp_engine = await async_get_snmp_engine(hass)
     try:
         brother = await Brother.create(
-            host, printer_type=printer_type, snmp_engine=snmp_engine
+            host, port, community, printer_type=printer_type, snmp_engine=snmp_engine
         )
     except (ConnectionError, SnmpError, TimeoutError) as error:
         raise ConfigEntryNotReady(
@@ -48,3 +60,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
 async def async_unload_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_migrate_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
+    """Migrate an old entry."""
+    if entry.version == 1 and entry.minor_version < 2:
+        new_data = entry.data.copy()
+        new_data[SECTION_ADVANCED_SETTINGS] = {
+            CONF_PORT: DEFAULT_PORT,
+            CONF_COMMUNITY: DEFAULT_COMMUNITY,
+        }
+        hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2)
+
+        _LOGGER.info(
+            "Migration to configuration version %s.%s successful",
+            entry.version,
+            entry.minor_version,
+        )
+
+    return True
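Note: the migration back-fills the new nested section with defaults so pre-1.2 entries keep working. Before-and-after shape of `entry.data` (sketch; the host is hypothetical):

# Hypothetical entry.data before the 1.1 -> 1.2 migration:
old_data = {"host": "192.168.1.50", "type": "laser"}

# ...and after, with the new section filled from the defaults above:
new_data = {
    **old_data,
    "advanced_settings": {"port": 161, "community": "public"},
}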
@@ -9,21 +9,65 @@ import voluptuous as vol
|
|||||||
|
|
||||||
from homeassistant.components.snmp import async_get_snmp_engine
|
from homeassistant.components.snmp import async_get_snmp_engine
|
||||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||||
from homeassistant.const import CONF_HOST, CONF_TYPE
|
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.data_entry_flow import section
|
||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||||
from homeassistant.util.network import is_host_valid
|
from homeassistant.util.network import is_host_valid
|
||||||
|
|
||||||
from .const import DOMAIN, PRINTER_TYPES
|
from .const import (
|
||||||
|
CONF_COMMUNITY,
|
||||||
|
DEFAULT_COMMUNITY,
|
||||||
|
DEFAULT_PORT,
|
||||||
|
DOMAIN,
|
||||||
|
PRINTER_TYPES,
|
||||||
|
SECTION_ADVANCED_SETTINGS,
|
||||||
|
)
|
||||||
|
|
||||||
DATA_SCHEMA = vol.Schema(
|
DATA_SCHEMA = vol.Schema(
|
||||||
{
|
{
|
||||||
vol.Required(CONF_HOST): str,
|
vol.Required(CONF_HOST): str,
|
||||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||||
|
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||||
|
vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
|
||||||
|
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
{"collapsed": True},
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
ZEROCONF_SCHEMA = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||||
|
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||||
|
vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
|
||||||
|
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
{"collapsed": True},
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
RECONFIGURE_SCHEMA = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_HOST): str,
|
||||||
|
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||||
|
vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
|
||||||
|
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
{"collapsed": True},
|
||||||
|
),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
RECONFIGURE_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})
|
|
||||||
|
|
||||||
|
|
||||||
async def validate_input(
|
async def validate_input(
|
||||||
@@ -35,7 +79,12 @@ async def validate_input(
|
|||||||
|
|
||||||
snmp_engine = await async_get_snmp_engine(hass)
|
snmp_engine = await async_get_snmp_engine(hass)
|
||||||
|
|
||||||
brother = await Brother.create(user_input[CONF_HOST], snmp_engine=snmp_engine)
|
brother = await Brother.create(
|
||||||
|
user_input[CONF_HOST],
|
||||||
|
user_input[SECTION_ADVANCED_SETTINGS][CONF_PORT],
|
||||||
|
user_input[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY],
|
||||||
|
snmp_engine=snmp_engine,
|
||||||
|
)
|
||||||
await brother.async_update()
|
await brother.async_update()
|
||||||
|
|
||||||
if expected_mac is not None and brother.serial.lower() != expected_mac:
|
if expected_mac is not None and brother.serial.lower() != expected_mac:
|
||||||
@@ -48,6 +97,7 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Brother Printer."""

     VERSION = 1
+    MINOR_VERSION = 2

     def __init__(self) -> None:
         """Initialize."""
@@ -126,13 +176,11 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
             title = f"{self.brother.model} {self.brother.serial}"
             return self.async_create_entry(
                 title=title,
-                data={CONF_HOST: self.host, CONF_TYPE: user_input[CONF_TYPE]},
+                data={CONF_HOST: self.host, **user_input},
             )
         return self.async_show_form(
             step_id="zeroconf_confirm",
-            data_schema=vol.Schema(
-                {vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES)}
-            ),
+            data_schema=ZEROCONF_SCHEMA,
             description_placeholders={
                 "serial_number": self.brother.serial,
                 "model": self.brother.model,
@@ -160,7 +208,7 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
             else:
                 return self.async_update_reload_and_abort(
                     entry,
-                    data_updates={CONF_HOST: user_input[CONF_HOST]},
+                    data_updates=user_input,
                 )

         return self.async_show_form(
homeassistant/components/brother/const.py
@@ -10,3 +10,10 @@ DOMAIN: Final = "brother"
 PRINTER_TYPES: Final = ["laser", "ink"]

 UPDATE_INTERVAL = timedelta(seconds=30)
+
+SECTION_ADVANCED_SETTINGS = "advanced_settings"
+
+CONF_COMMUNITY = "community"
+
+DEFAULT_COMMUNITY = "public"
+DEFAULT_PORT = 161
homeassistant/components/brother/strings.json
@@ -8,7 +8,21 @@
         "type": "Type of the printer"
       },
       "data_description": {
-        "host": "The hostname or IP address of the Brother printer to control."
+        "host": "The hostname or IP address of the Brother printer to control.",
+        "type": "Brother printer type: ink or laser."
+      },
+      "sections": {
+        "advanced_settings": {
+          "name": "Advanced settings",
+          "data": {
+            "port": "[%key:common::config_flow::data::port%]",
+            "community": "SNMP Community"
+          },
+          "data_description": {
+            "port": "The SNMP port of the Brother printer.",
+            "community": "A simple password for devices to communicate to each other."
+          }
+        }
       }
     },
     "zeroconf_confirm": {
@@ -16,6 +30,22 @@
       "title": "Discovered Brother Printer",
       "data": {
         "type": "[%key:component::brother::config::step::user::data::type%]"
+      },
+      "data_description": {
+        "type": "[%key:component::brother::config::step::user::data_description::type%]"
+      },
+      "sections": {
+        "advanced_settings": {
+          "name": "Advanced settings",
+          "data": {
+            "port": "[%key:common::config_flow::data::port%]",
+            "community": "SNMP Community"
+          },
+          "data_description": {
+            "port": "The SNMP port of the Brother printer.",
+            "community": "A simple password for devices to communicate to each other."
+          }
+        }
       }
     },
     "reconfigure": {
@@ -25,6 +55,19 @@
       },
       "data_description": {
         "host": "[%key:component::brother::config::step::user::data_description::host%]"
+      },
+      "sections": {
+        "advanced_settings": {
+          "name": "Advanced settings",
+          "data": {
+            "port": "[%key:common::config_flow::data::port%]",
+            "community": "SNMP Community"
+          },
+          "data_description": {
+            "port": "The SNMP port of the Brother printer.",
+            "community": "A simple password for devices to communicate to each other."
+          }
+        }
       }
     }
   },
homeassistant/components/bthome/manifest.json
@@ -20,5 +20,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/bthome",
   "iot_class": "local_push",
-  "requirements": ["bthome-ble==3.13.1"]
+  "requirements": ["bthome-ble==3.14.2"]
 }
homeassistant/components/bthome/sensor.py
@@ -25,6 +25,7 @@ from homeassistant.const import (
     DEGREE,
     LIGHT_LUX,
     PERCENTAGE,
+    REVOLUTIONS_PER_MINUTE,
     SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
     EntityCategory,
     UnitOfConductivity,
@@ -269,6 +270,15 @@ SENSOR_DESCRIPTIONS = {
         native_unit_of_measurement=DEGREE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
+    # Rotational speed (rpm)
+    (
+        BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED,
+        Units.REVOLUTIONS_PER_MINUTE,
+    ): SensorEntityDescription(
+        key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
+        native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
     # Signal Strength (RSSI) (dB)
     (
         BTHomeSensorDeviceClass.SIGNAL_STRENGTH,
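Note: `SENSOR_DESCRIPTIONS` is keyed by a (device class, unit) tuple, so a parsed BTHome measurement resolves to its entity description with one dict lookup. A rough standalone sketch of the pattern (the dataclass and names are illustrative, not the integration's real types):

    from dataclasses import dataclass

    @dataclass(frozen=True)
    class Description:
        key: str
        native_unit_of_measurement: str

    # Keyed by (device_class, unit), as SENSOR_DESCRIPTIONS is above.
    DESCRIPTIONS = {
        ("rotational_speed", "rpm"): Description(
            key="rotational_speed_rpm", native_unit_of_measurement="rpm"
        ),
    }

    def lookup(device_class: str, unit: str) -> Description | None:
        # A single dict lookup resolves the sensor's description.
        return DESCRIPTIONS.get((device_class, unit))

    print(lookup("rotational_speed", "rpm"))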
homeassistant/components/cloud/http.py
@@ -37,6 +37,10 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.dispatcher import async_dispatcher_send
+from homeassistant.loader import (
+    async_get_custom_components,
+    async_get_loaded_integration,
+)
 from homeassistant.util.location import async_detect_location_info

 from .alexa_config import entity_supported as entity_supported_by_alexa
@@ -431,6 +435,79 @@ class DownloadSupportPackageView(HomeAssistantView):
     url = "/api/cloud/support_package"
     name = "api:cloud:support_package"

+    async def _get_integration_info(self, hass: HomeAssistant) -> dict[str, Any]:
+        """Collect information about active and custom integrations."""
+        # Get loaded components from hass.config.components
+        loaded_components = hass.config.components.copy()
+
+        # Get custom integrations
+        custom_domains = set()
+        with suppress(Exception):
+            custom_domains = set(await async_get_custom_components(hass))
+
+        # Separate built-in and custom integrations
+        builtin_integrations = []
+        custom_integrations = []
+
+        for domain in sorted(loaded_components):
+            try:
+                integration = async_get_loaded_integration(hass, domain)
+            except Exception:  # noqa: BLE001
+                # Broad exception catch for robustness in support package
+                # generation. If we can't get integration info,
+                # just add the domain
+                if domain in custom_domains:
+                    custom_integrations.append(
+                        {
+                            "domain": domain,
+                            "name": "Unknown",
+                            "version": "Unknown",
+                            "documentation": "Unknown",
+                        }
+                    )
+                else:
+                    builtin_integrations.append(
+                        {
+                            "domain": domain,
+                            "name": "Unknown",
+                        }
+                    )
+            else:
+                if domain in custom_domains:
+                    # This is a custom integration
+                    # include version and documentation link
+                    version = (
+                        str(integration.version) if integration.version else "Unknown"
+                    )
+                    if not (documentation := integration.documentation):
+                        documentation = "Unknown"
+
+                    custom_integrations.append(
+                        {
+                            "domain": domain,
+                            "name": integration.name,
+                            "version": version,
+                            "documentation": documentation,
+                        }
+                    )
+                else:
+                    # This is a built-in integration.
+                    # No version needed, as it is always the same as the
+                    # Home Assistant version
+                    builtin_integrations.append(
+                        {
+                            "domain": domain,
+                            "name": integration.name,
+                        }
+                    )
+
+        return {
+            "builtin_count": len(builtin_integrations),
+            "builtin_integrations": builtin_integrations,
+            "custom_count": len(custom_integrations),
+            "custom_integrations": custom_integrations,
+        }
+
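Note: for orientation, the dictionary `_get_integration_info` returns has roughly this shape (all entries are illustrative):

    {
        "builtin_count": 2,
        "builtin_integrations": [
            {"domain": "cloud", "name": "Home Assistant Cloud"},
            {"domain": "sensor", "name": "Sensor"},
        ],
        "custom_count": 1,
        "custom_integrations": [
            {
                "domain": "my_custom",  # hypothetical custom integration
                "name": "My Custom",
                "version": "1.0.0",
                "documentation": "https://example.com/docs",
            }
        ],
    }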
     async def _generate_markdown(
         self,
         hass: HomeAssistant,
@@ -453,6 +530,38 @@ class DownloadSupportPackageView(HomeAssistantView):
         markdown = "## System Information\n\n"
         markdown += get_domain_table_markdown(hass_info)

+        # Add integration information
+        try:
+            integration_info = await self._get_integration_info(hass)
+        except Exception:  # noqa: BLE001
+            # Broad exception catch for robustness in support package generation
+            # If there's any error getting integration info, just note it
+            markdown += "## Active integrations\n\n"
+            markdown += "Unable to collect integration information\n\n"
+        else:
+            markdown += "## Active Integrations\n\n"
+            markdown += f"Built-in integrations: {integration_info['builtin_count']}\n"
+            markdown += f"Custom integrations: {integration_info['custom_count']}\n\n"
+
+            # Built-in integrations
+            if integration_info["builtin_integrations"]:
+                markdown += "<details><summary>Built-in integrations</summary>\n\n"
+                markdown += "Domain | Name\n"
+                markdown += "--- | ---\n"
+                for integration in integration_info["builtin_integrations"]:
+                    markdown += f"{integration['domain']} | {integration['name']}\n"
+                markdown += "\n</details>\n\n"
+
+            # Custom integrations
+            if integration_info["custom_integrations"]:
+                markdown += "<details><summary>Custom integrations</summary>\n\n"
+                markdown += "Domain | Name | Version | Documentation\n"
+                markdown += "--- | --- | --- | ---\n"
+                for integration in integration_info["custom_integrations"]:
+                    doc_url = integration.get("documentation") or "N/A"
+                    markdown += f"{integration['domain']} | {integration['name']} | {integration['version']} | {doc_url}\n"
+                markdown += "\n</details>\n\n"
+
         for domain, domain_info in domains_info.items():
             domain_info_md = get_domain_table_markdown(domain_info)
             markdown += (
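Note: with those pieces in place, the new section of the generated support package renders roughly like this (counts and rows are illustrative):

    ## Active Integrations

    Built-in integrations: 2
    Custom integrations: 1

    <details><summary>Custom integrations</summary>

    Domain | Name | Version | Documentation
    --- | --- | --- | ---
    my_custom | My Custom | 1.0.0 | https://example.com/docs

    </details>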
homeassistant/components/cloud/manifest.json
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.1.0"],
+  "requirements": ["hass-nabucasa==1.1.1"],
   "single_config_entry": true
 }
homeassistant/components/cloud/subscription.py
@@ -25,7 +25,11 @@ async def async_subscription_info(cloud: Cloud[CloudClient]) -> SubscriptionInfo
         return await cloud.payments.subscription_info()
     except PaymentsApiError as exception:
         _LOGGER.error("Failed to fetch subscription information - %s", exception)
+    except TimeoutError:
+        _LOGGER.error(
+            "A timeout of %s was reached while trying to fetch subscription information",
+            REQUEST_TIMEOUT,
+        )
     return None

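Note: on Python 3.11+ an expired asyncio timeout raises the built-in `TimeoutError`, which `PaymentsApiError` does not cover, hence the dedicated branch. A minimal sketch of the pattern, assuming a ten-second `REQUEST_TIMEOUT` (the constant's real value lives elsewhere in the module):

    import asyncio

    REQUEST_TIMEOUT = 10  # assumed value, for illustration only

    async def fetch() -> dict | None:
        try:
            async with asyncio.timeout(REQUEST_TIMEOUT):
                await asyncio.sleep(30)  # stand-in for the subscription API call
        except TimeoutError:
            # The condition the new except branch logs in subscription.py.
            print(f"A timeout of {REQUEST_TIMEOUT} was reached")
            return None
        return {}

    asyncio.run(fetch())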
homeassistant/components/comelit/coordinator.py
@@ -2,7 +2,7 @@

 from abc import abstractmethod
 from datetime import timedelta
-from typing import TypeVar
+from typing import Any, TypeVar

 from aiocomelit.api import (
     AlarmDataObject,
@@ -13,7 +13,16 @@ from aiocomelit.api import (
     ComelitVedoAreaObject,
     ComelitVedoZoneObject,
 )
-from aiocomelit.const import BRIDGE, VEDO
+from aiocomelit.const import (
+    BRIDGE,
+    CLIMATE,
+    COVER,
+    IRRIGATION,
+    LIGHT,
+    OTHER,
+    SCENARIO,
+    VEDO,
+)
 from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
 from aiohttp import ClientSession

@@ -111,6 +120,32 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
     async def _async_update_system_data(self) -> T:
         """Class method for updating data."""

+    async def _async_remove_stale_devices(
+        self,
+        previous_list: dict[int, Any],
+        current_list: dict[int, Any],
+        dev_type: str,
+    ) -> None:
+        """Remove stale devices."""
+        device_registry = dr.async_get(self.hass)
+
+        for i in previous_list:
+            if i not in current_list:
+                _LOGGER.debug(
+                    "Detected change in %s devices: index %s removed",
+                    dev_type,
+                    i,
+                )
+                identifier = f"{self.config_entry.entry_id}-{dev_type}-{i}"
+                device = device_registry.async_get_device(
+                    identifiers={(DOMAIN, identifier)}
+                )
+                if device:
+                    device_registry.async_update_device(
+                        device_id=device.id,
+                        remove_config_entry_id=self.config_entry.entry_id,
+                    )
+
+
 class ComelitSerialBridge(
     ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
@@ -137,7 +172,15 @@ class ComelitSerialBridge(
         self,
     ) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
         """Specific method for updating data."""
-        return await self.api.get_all_devices()
+        data = await self.api.get_all_devices()
+
+        if self.data:
+            for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
+                await self._async_remove_stale_devices(
+                    self.data[dev_type], data[dev_type], dev_type
+                )
+
+        return data


 class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
@@ -163,4 +206,14 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
         self,
     ) -> AlarmDataObject:
         """Specific method for updating data."""
-        return await self.api.get_all_areas_and_zones()
+        data = await self.api.get_all_areas_and_zones()
+
+        if self.data:
+            for obj_type in ("alarm_areas", "alarm_zones"):
+                await self._async_remove_stale_devices(
+                    self.data[obj_type],
+                    data[obj_type],
+                    "area" if obj_type == "alarm_areas" else "zone",
+                )
+
+        return data
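Note: the stale-device pass reduces to a key diff between the previous and current device dictionaries; any index present before but absent now is detached from the config entry. Stripped to its core:

    # Sketch: indices in the previous poll that vanished from the current one
    # are the stale devices removed from the device registry.
    previous_list = {1: "light-1", 2: "light-2", 3: "light-3"}
    current_list = {1: "light-1", 3: "light-3"}

    stale = [i for i in previous_list if i not in current_list]
    print(stale)  # [2] -> identifier f"{entry_id}-{dev_type}-2" gets removed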
homeassistant/components/comelit/quality_scale.yaml
@@ -72,9 +72,7 @@ rules:
   repair-issues:
     status: exempt
     comment: no known use cases for repair issues or flows, yet
-  stale-devices:
-    status: todo
-    comment: missing implementation
+  stale-devices: done

   # Platinum
   async-dependency: done
homeassistant/components/compit/__init__.py (new file)
@@ -0,0 +1,45 @@
"""The Compit integration."""

from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth

from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

PLATFORMS = [
    Platform.CLIMATE,
]


async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
    """Set up Compit from a config entry."""

    session = async_get_clientsession(hass)
    connector = CompitApiConnector(session)
    try:
        connected = await connector.init(
            entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language
        )
    except CannotConnect as e:
        raise ConfigEntryNotReady(f"Error while connecting to Compit: {e}") from e
    except InvalidAuth as e:
        raise ConfigEntryAuthFailed(
            f"Invalid credentials for {entry.data[CONF_EMAIL]}"
        ) from e

    if not connected:
        raise ConfigEntryAuthFailed("Authentication API error")

    coordinator = CompitDataUpdateCoordinator(hass, entry, connector)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
    """Unload an entry for the Compit integration."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/compit/climate.py (new file)
@@ -0,0 +1,265 @@
"""Module contains the CompitClimate class for controlling climate entities."""

import logging
from typing import Any

from compit_inext_api import Param, Parameter
from compit_inext_api.consts import (
    CompitFanMode,
    CompitHVACMode,
    CompitParameter,
    CompitPresetMode,
)
from propcache.api import cached_property

from homeassistant.components.climate import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
    FAN_OFF,
    PRESET_AWAY,
    PRESET_ECO,
    PRESET_HOME,
    PRESET_NONE,
    ClimateEntity,
    ClimateEntityFeature,
    HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

_LOGGER: logging.Logger = logging.getLogger(__name__)

# Device class for climate devices in Compit system
CLIMATE_DEVICE_CLASS = 10
PARALLEL_UPDATES = 0

COMPIT_MODE_MAP = {
    CompitHVACMode.COOL: HVACMode.COOL,
    CompitHVACMode.HEAT: HVACMode.HEAT,
    CompitHVACMode.OFF: HVACMode.OFF,
}

COMPIT_FANSPEED_MAP = {
    CompitFanMode.OFF: FAN_OFF,
    CompitFanMode.AUTO: FAN_AUTO,
    CompitFanMode.LOW: FAN_LOW,
    CompitFanMode.MEDIUM: FAN_MEDIUM,
    CompitFanMode.HIGH: FAN_HIGH,
    CompitFanMode.HOLIDAY: FAN_AUTO,
}

COMPIT_PRESET_MAP = {
    CompitPresetMode.AUTO: PRESET_HOME,
    CompitPresetMode.HOLIDAY: PRESET_ECO,
    CompitPresetMode.MANUAL: PRESET_NONE,
    CompitPresetMode.AWAY: PRESET_AWAY,
}

HVAC_MODE_TO_COMPIT_MODE = {v: k for k, v in COMPIT_MODE_MAP.items()}
FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()}
PRESET_MODE_TO_COMPIT_PRESET_MODE = {v: k for k, v in COMPIT_PRESET_MAP.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: CompitConfigEntry,
    async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the CompitClimate platform from a config entry."""

    coordinator = entry.runtime_data
    climate_entities = []
    for device_id in coordinator.connector.all_devices:
        device = coordinator.connector.all_devices[device_id]

        if device.definition.device_class == CLIMATE_DEVICE_CLASS:
            climate_entities.append(
                CompitClimate(
                    coordinator,
                    device_id,
                    {
                        parameter.parameter_code: parameter
                        for parameter in device.definition.parameters
                    },
                    device.definition.name,
                )
            )

    async_add_devices(climate_entities)


class CompitClimate(CoordinatorEntity[CompitDataUpdateCoordinator], ClimateEntity):
    """Representation of a Compit climate device."""

    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_hvac_modes = [*COMPIT_MODE_MAP.values()]
    _attr_name = None
    _attr_has_entity_name = True
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.FAN_MODE
        | ClimateEntityFeature.PRESET_MODE
    )

    def __init__(
        self,
        coordinator: CompitDataUpdateCoordinator,
        device_id: int,
        parameters: dict[str, Parameter],
        device_name: str,
    ) -> None:
        """Initialize the climate device."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{device_name}_{device_id}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, str(device_id))},
            name=device_name,
            manufacturer=MANUFACTURER_NAME,
            model=device_name,
        )

        self.parameters = parameters
        self.device_id = device_id
        self.available_presets: Parameter | None = self.parameters.get(
            CompitParameter.PRESET_MODE.value
        )
        self.available_fan_modes: Parameter | None = self.parameters.get(
            CompitParameter.FAN_MODE.value
        )

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return (
            super().available
            and self.device_id in self.coordinator.connector.all_devices
        )

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        value = self.get_parameter_value(CompitParameter.CURRENT_TEMPERATURE)
        if value is None:
            return None
        return float(value.value)

    @property
    def target_temperature(self) -> float | None:
        """Return the temperature we try to reach."""
        value = self.get_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE)
        if value is None:
            return None
        return float(value.value)

    @cached_property
    def preset_modes(self) -> list[str] | None:
        """Return the available preset modes."""
        if self.available_presets is None or self.available_presets.details is None:
            return []

        preset_modes = []
        for item in self.available_presets.details:
            if item is not None:
                ha_preset = COMPIT_PRESET_MAP.get(CompitPresetMode(item.state))
                if ha_preset and ha_preset not in preset_modes:
                    preset_modes.append(ha_preset)

        return preset_modes

    @cached_property
    def fan_modes(self) -> list[str] | None:
        """Return the available fan modes."""
        if self.available_fan_modes is None or self.available_fan_modes.details is None:
            return []

        fan_modes = []
        for item in self.available_fan_modes.details:
            if item is not None:
                ha_fan_mode = COMPIT_FANSPEED_MAP.get(CompitFanMode(item.state))
                if ha_fan_mode and ha_fan_mode not in fan_modes:
                    fan_modes.append(ha_fan_mode)

        return fan_modes

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode."""
        preset_mode = self.get_parameter_value(CompitParameter.PRESET_MODE)

        if preset_mode:
            compit_preset_mode = CompitPresetMode(preset_mode.value)
            return COMPIT_PRESET_MAP.get(compit_preset_mode)
        return None

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        fan_mode = self.get_parameter_value(CompitParameter.FAN_MODE)
        if fan_mode:
            compit_fan_mode = CompitFanMode(fan_mode.value)
            return COMPIT_FANSPEED_MAP.get(compit_fan_mode)
        return None

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        hvac_mode = self.get_parameter_value(CompitParameter.HVAC_MODE)
        if hvac_mode:
            compit_hvac_mode = CompitHVACMode(hvac_mode.value)
            return COMPIT_MODE_MAP.get(compit_hvac_mode)
        return None

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        if temp is None:
            raise ServiceValidationError("Temperature argument missing")
        await self.set_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE, temp)

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target HVAC mode."""

        if not (mode := HVAC_MODE_TO_COMPIT_MODE.get(hvac_mode)):
            raise ServiceValidationError(f"Invalid hvac mode {hvac_mode}")

        await self.set_parameter_value(CompitParameter.HVAC_MODE, mode.value)

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new target preset mode."""

        compit_preset = PRESET_MODE_TO_COMPIT_PRESET_MODE.get(preset_mode)
        if compit_preset is None:
            raise ServiceValidationError(f"Invalid preset mode: {preset_mode}")

        await self.set_parameter_value(CompitParameter.PRESET_MODE, compit_preset.value)

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""

        compit_fan_mode = FAN_MODE_TO_COMPIT_FAN_MODE.get(fan_mode)
        if compit_fan_mode is None:
            raise ServiceValidationError(f"Invalid fan mode: {fan_mode}")

        await self.set_parameter_value(CompitParameter.FAN_MODE, compit_fan_mode.value)

    async def set_parameter_value(self, parameter: CompitParameter, value: int) -> None:
        """Call the API to set a parameter to a new value."""
        await self.coordinator.connector.set_device_parameter(
            self.device_id, parameter, value
        )
        self.async_write_ha_state()

    def get_parameter_value(self, parameter: CompitParameter) -> Param | None:
        """Get the parameter value from the device state."""
        return self.coordinator.connector.get_device_parameter(
            self.device_id, parameter
        )
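Note: the reverse maps used by the setters come from one-line dict inversions, and inversion is lossy for non-injective maps: both `CompitFanMode.AUTO` and `CompitFanMode.HOLIDAY` map to `FAN_AUTO`, so `FAN_AUTO` inverts to whichever key came last (HOLIDAY here). A small self-contained sketch with a stand-in enum:

    from enum import Enum

    class CompitFanMode(Enum):  # illustrative stand-in for the library enum
        OFF = 0
        AUTO = 1
        HOLIDAY = 5

    COMPIT_FANSPEED_MAP = {
        CompitFanMode.OFF: "off",
        CompitFanMode.AUTO: "auto",
        CompitFanMode.HOLIDAY: "auto",  # holiday also presents as "auto"
    }

    # Inversion keeps the last key that maps to each value.
    FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()}
    print(FAN_MODE_TO_COMPIT_FAN_MODE["auto"])  # CompitFanMode.HOLIDAY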
homeassistant/components/compit/config_flow.py (new file)
@@ -0,0 +1,110 @@
"""Config flow for Compit integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_EMAIL): str,
        vol.Required(CONF_PASSWORD): str,
    }
)

STEP_REAUTH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)


class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Compit."""

    VERSION = 1

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            session = async_create_clientsession(self.hass)
            api = CompitApiConnector(session)
            success = False
            try:
                success = await api.init(
                    user_input[CONF_EMAIL],
                    user_input[CONF_PASSWORD],
                    self.hass.config.language,
                )
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if not success:
                    # Api returned unexpected result but no exception
                    _LOGGER.error("Compit api returned unexpected result")
                    errors["base"] = "unknown"
                else:
                    await self.async_set_unique_id(user_input[CONF_EMAIL])

                    if self.source == SOURCE_REAUTH:
                        self._abort_if_unique_id_mismatch()
                        return self.async_update_reload_and_abort(
                            self._get_reauth_entry(), data_updates=user_input
                        )
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=user_input[CONF_EMAIL], data=user_input
                    )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:
        """Handle re-auth."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm re-authentication."""
        errors: dict[str, str] = {}
        reauth_entry = self._get_reauth_entry()
        reauth_entry_data = reauth_entry.data

        if user_input:
            # Reuse async_step_user with combined credentials
            return await self.async_step_user(
                {
                    CONF_EMAIL: reauth_entry_data[CONF_EMAIL],
                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                }
            )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=STEP_REAUTH_SCHEMA,
            description_placeholders={CONF_EMAIL: reauth_entry_data[CONF_EMAIL]},
            errors=errors,
        )
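Note: reauth funnels back through `async_step_user` by merging the stored email with the freshly entered password, so validation, unique-ID checks, and entry updates live in a single place. In dict terms:

    # Sketch: reauth_confirm rebuilds full credentials for async_step_user.
    reauth_entry_data = {"email": "user@example.com", "password": "old"}  # stored
    user_input = {"password": "new-secret"}  # from the reauth form

    merged = {
        "email": reauth_entry_data["email"],
        "password": user_input["password"],
    }
    print(merged)  # {'email': 'user@example.com', 'password': 'new-secret'}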
homeassistant/components/compit/const.py (new file)
@@ -0,0 +1,4 @@
"""Constants for the Compit integration."""

DOMAIN = "compit"
MANUFACTURER_NAME = "Compit"
homeassistant/components/compit/coordinator.py (new file)
@@ -0,0 +1,43 @@
"""Define an object to manage fetching Compit data."""

from datetime import timedelta
import logging

from compit_inext_api import CompitApiConnector, DeviceInstance

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER: logging.Logger = logging.getLogger(__name__)

type CompitConfigEntry = ConfigEntry[CompitDataUpdateCoordinator]


class CompitDataUpdateCoordinator(DataUpdateCoordinator[dict[int, DeviceInstance]]):
    """Class to manage fetching data from the API."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        connector: CompitApiConnector,
    ) -> None:
        """Initialize."""
        self.connector = connector

        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
            config_entry=config_entry,
        )

    async def _async_update_data(self) -> dict[int, DeviceInstance]:
        """Update data via library."""
        await self.connector.update_state(device_id=None)  # Update all devices
        return self.connector.all_devices
homeassistant/components/compit/manifest.json (new file)
@@ -0,0 +1,12 @@
{
  "domain": "compit",
  "name": "Compit",
  "codeowners": ["@Przemko92"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/compit",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["compit"],
  "quality_scale": "bronze",
  "requirements": ["compit-inext-api==0.3.1"]
}
homeassistant/components/compit/quality_scale.yaml (new file)
@@ -0,0 +1,86 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules:
    status: exempt
    comment: |
      This integration does not use any common modules.
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration does not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: done
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      This integration does not have any entities that should disabled by default.
  entity-translations: done
  exception-translations: todo
  icon-translations:
    status: exempt
    comment: |
      There is no need for icon translations.
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo
  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: done
homeassistant/components/compit/strings.json (new file)
@@ -0,0 +1,35 @@
{
  "config": {
    "step": {
      "user": {
        "description": "Please enter your https://inext.compit.pl/ credentials.",
        "title": "Connect to Compit iNext",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "The email address of your inext.compit.pl account",
          "password": "The password of your inext.compit.pl account"
        }
      },
      "reauth_confirm": {
        "description": "Please update your password for {email}",
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::compit::config::step::user::data_description::password%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    }
  }
}
homeassistant/components/conversation/__init__.py
@@ -50,14 +50,13 @@ from .const import (
     ATTR_LANGUAGE,
     ATTR_TEXT,
     DATA_COMPONENT,
-    DATA_DEFAULT_ENTITY,
     DOMAIN,
     HOME_ASSISTANT_AGENT,
     SERVICE_PROCESS,
     SERVICE_RELOAD,
     ConversationEntityFeature,
 )
-from .default_agent import DefaultAgent, async_setup_default_agent
+from .default_agent import async_setup_default_agent
 from .entity import ConversationEntity
 from .http import async_setup as async_setup_conversation_http
 from .models import AbstractConversationAgent, ConversationInput, ConversationResult
@@ -142,7 +141,7 @@ def async_unset_agent(
     hass: HomeAssistant,
     config_entry: ConfigEntry,
 ) -> None:
-    """Set the agent to handle the conversations."""
+    """Unset the agent to handle the conversations."""
     get_agent_manager(hass).async_unset_agent(config_entry.entry_id)

@@ -241,10 +240,10 @@ async def async_handle_sentence_triggers(

     Returns None if no match occurred.
     """
-    default_agent = async_get_agent(hass)
-    assert isinstance(default_agent, DefaultAgent)
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None

-    return await default_agent.async_handle_sentence_triggers(user_input)
+    return await agent.async_handle_sentence_triggers(user_input)


 async def async_handle_intents(
@@ -257,12 +256,10 @@ async def async_handle_intents(

     Returns None if no match occurred.
     """
-    default_agent = async_get_agent(hass)
-    assert isinstance(default_agent, DefaultAgent)
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None

-    return await default_agent.async_handle_intents(
-        user_input, intent_filter=intent_filter
-    )
+    return await agent.async_handle_intents(user_input, intent_filter=intent_filter)


 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -298,9 +295,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     async def handle_reload(service: ServiceCall) -> None:
         """Reload intents."""
-        await hass.data[DATA_DEFAULT_ENTITY].async_reload(
-            language=service.data.get(ATTR_LANGUAGE)
-        )
+        agent = get_agent_manager(hass).default_agent
+        if agent is not None:
+            await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))

     hass.services.async_register(
         DOMAIN,
homeassistant/components/conversation/agent_manager.py
@@ -4,7 +4,7 @@ from __future__ import annotations

 import dataclasses
 import logging
-from typing import Any
+from typing import TYPE_CHECKING, Any

 import voluptuous as vol

@@ -12,7 +12,7 @@ from homeassistant.core import Context, HomeAssistant, async_get_hass, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv, intent, singleton

-from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY, HOME_ASSISTANT_AGENT
+from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT
 from .entity import ConversationEntity
 from .models import (
     AbstractConversationAgent,
@@ -28,6 +28,9 @@ from .trace import (

 _LOGGER = logging.getLogger(__name__)

+if TYPE_CHECKING:
+    from .default_agent import DefaultAgent
+

 @singleton.singleton("conversation_agent")
 @callback
@@ -49,8 +52,10 @@ def async_get_agent(
     hass: HomeAssistant, agent_id: str | None = None
 ) -> AbstractConversationAgent | ConversationEntity | None:
     """Get specified agent."""
+    manager = get_agent_manager(hass)
+
     if agent_id is None or agent_id == HOME_ASSISTANT_AGENT:
-        return hass.data[DATA_DEFAULT_ENTITY]
+        return manager.default_agent

     if "." in agent_id:
         return hass.data[DATA_COMPONENT].get_entity(agent_id)
@@ -71,6 +76,7 @@ async def async_converse(
     language: str | None = None,
     agent_id: str | None = None,
     device_id: str | None = None,
+    satellite_id: str | None = None,
     extra_system_prompt: str | None = None,
 ) -> ConversationResult:
     """Process text and get intent."""
@@ -97,6 +103,7 @@ async def async_converse(
         context=context,
         conversation_id=conversation_id,
         device_id=device_id,
+        satellite_id=satellite_id,
         language=language,
         agent_id=agent_id,
         extra_system_prompt=extra_system_prompt,
@@ -132,6 +139,7 @@ class AgentManager:
         """Initialize the conversation agents."""
         self.hass = hass
         self._agents: dict[str, AbstractConversationAgent] = {}
+        self.default_agent: DefaultAgent | None = None

     @callback
     def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:
@@ -180,3 +188,7 @@ class AgentManager:
     def async_unset_agent(self, agent_id: str) -> None:
         """Unset the agent."""
         self._agents.pop(agent_id, None)
+
+    async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
+        """Set up the default agent."""
+        self.default_agent = agent
homeassistant/components/conversation/const.py
@@ -10,11 +10,9 @@ from homeassistant.util.hass_dict import HassKey
 if TYPE_CHECKING:
     from homeassistant.helpers.entity_component import EntityComponent

-    from .default_agent import DefaultAgent
     from .entity import ConversationEntity

 DOMAIN = "conversation"
-DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
 HOME_ASSISTANT_AGENT = "conversation.home_assistant"

 ATTR_TEXT = "text"
@@ -26,7 +24,6 @@ SERVICE_PROCESS = "process"
 SERVICE_RELOAD = "reload"

 DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN)
-DATA_DEFAULT_ENTITY: HassKey[DefaultAgent] = HassKey(f"{DOMAIN}_default_entity")


 class ConversationEntityFeature(IntFlag):
homeassistant/components/conversation/default_agent.py
@@ -68,13 +68,9 @@ from homeassistant.helpers.event import async_track_state_added_domain
 from homeassistant.util import language as language_util
 from homeassistant.util.json import JsonObjectType, json_loads_object

+from .agent_manager import get_agent_manager
 from .chat_log import AssistantContent, ChatLog
-from .const import (
-    DATA_DEFAULT_ENTITY,
-    DEFAULT_EXPOSED_ATTRIBUTES,
-    DOMAIN,
-    ConversationEntityFeature,
-)
+from .const import DOMAIN, ConversationEntityFeature
 from .entity import ConversationEntity
 from .models import ConversationInput, ConversationResult
 from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -83,6 +79,8 @@ _LOGGER = logging.getLogger(__name__)
 _DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
 _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]

+_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
+
 REGEX_TYPE = type(re.compile(""))
 TRIGGER_CALLBACK_TYPE = Callable[
     [ConversationInput, RecognizeResult], Awaitable[str | None]
@@ -155,8 +153,8 @@ class IntentCacheKey:
     language: str
     """Language of text."""

-    device_id: str | None
-    """Device id from user input."""
+    satellite_id: str | None
+    """Satellite id from user input."""


 @dataclass(frozen=True)
@@ -209,9 +207,9 @@ async def async_setup_default_agent(
     config_intents: dict[str, Any],
 ) -> None:
     """Set up entity registry listener for the default agent."""
-    entity = DefaultAgent(hass, config_intents)
-    await entity_component.async_add_entities([entity])
-    hass.data[DATA_DEFAULT_ENTITY] = entity
+    agent = DefaultAgent(hass, config_intents)
+    await entity_component.async_add_entities([agent])
+    await get_agent_manager(hass).async_setup_default_agent(agent)

     @core.callback
     def async_entity_state_listener(
@@ -445,9 +443,15 @@ class DefaultAgent(ConversationEntity):
             }
             for entity in result.entities_list
         }
-        device_area = self._get_device_area(user_input.device_id)
-        if device_area:
-            slots["preferred_area_id"] = {"value": device_area.id}
+        satellite_id = user_input.satellite_id
+        device_id = user_input.device_id
+        satellite_area, device_id = self._get_satellite_area_and_device(
+            satellite_id, device_id
+        )
+        if satellite_area is not None:
+            slots["preferred_area_id"] = {"value": satellite_area.id}

         async_conversation_trace_append(
             ConversationTraceEventType.TOOL_CALL,
             {
@@ -469,7 +473,8 @@ class DefaultAgent(ConversationEntity):
                 user_input.context,
                 language,
                 assistant=DOMAIN,
-                device_id=user_input.device_id,
+                device_id=device_id,
+                satellite_id=satellite_id,
                 conversation_agent_id=user_input.agent_id,
             )
         except intent.MatchFailedError as match_error:
@@ -535,7 +540,9 @@ class DefaultAgent(ConversationEntity):

         # Try cache first
         cache_key = IntentCacheKey(
-            text=user_input.text, language=language, device_id=user_input.device_id
+            text=user_input.text,
+            language=language,
+            satellite_id=user_input.satellite_id,
         )
         cache_value = self._intent_cache.get(cache_key)
         if cache_value is not None:
@@ -845,7 +852,7 @@ class DefaultAgent(ConversationEntity):
             context = {"domain": state.domain}
             if state.attributes:
                 # Include some attributes
-                for attr in DEFAULT_EXPOSED_ATTRIBUTES:
+                for attr in _DEFAULT_EXPOSED_ATTRIBUTES:
                     if attr not in state.attributes:
                         continue
                     context[attr] = state.attributes[attr]
@@ -1305,28 +1312,40 @@ class DefaultAgent(ConversationEntity):
         self, user_input: ConversationInput
     ) -> dict[str, Any] | None:
         """Return intent recognition context for user input."""
-        if not user_input.device_id:
+        satellite_area, _ = self._get_satellite_area_and_device(
+            user_input.satellite_id, user_input.device_id
+        )
+        if satellite_area is None:
             return None
 
-        device_area = self._get_device_area(user_input.device_id)
-        if device_area is None:
-            return None
-
-        return {"area": {"value": device_area.name, "text": device_area.name}}
+        return {"area": {"value": satellite_area.name, "text": satellite_area.name}}
 
-    def _get_device_area(self, device_id: str | None) -> ar.AreaEntry | None:
-        """Return area object for given device identifier."""
-        if device_id is None:
-            return None
-
-        devices = dr.async_get(self.hass)
-        device = devices.async_get(device_id)
-        if (device is None) or (device.area_id is None):
-            return None
-
-        areas = ar.async_get(self.hass)
-
-        return areas.async_get_area(device.area_id)
+    def _get_satellite_area_and_device(
+        self, satellite_id: str | None, device_id: str | None = None
+    ) -> tuple[ar.AreaEntry | None, str | None]:
+        """Return area entry and device id."""
+        hass = self.hass
+
+        area_id: str | None = None
+
+        if (
+            satellite_id is not None
+            and (entity_entry := er.async_get(hass).async_get(satellite_id)) is not None
+        ):
+            area_id = entity_entry.area_id
+            device_id = entity_entry.device_id
+
+        if (
+            area_id is None
+            and device_id is not None
+            and (device_entry := dr.async_get(hass).async_get(device_id)) is not None
+        ):
+            area_id = device_entry.area_id
+
+        if area_id is None:
+            return None, device_id
+
+        return ar.async_get(hass).async_get_area(area_id), device_id
 
     def _get_error_text(
         self,
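
The new `_get_satellite_area_and_device` prefers the satellite entity's own area and device, and only falls back to the device registry when the entity carries no area. A standalone sketch of that lookup order, with plain dicts standing in for the entity and device registries (illustrative names, not the Home Assistant API):

```python
def get_area_and_device(
    satellite_id: str | None,
    device_id: str | None,
    entities: dict[str, dict],
    devices: dict[str, dict],
) -> tuple[str | None, str | None]:
    """Resolve an area: satellite entity first, then the device as fallback."""
    area_id: str | None = None
    # 1) A known satellite entity supplies both its area and its device.
    if satellite_id is not None and (entry := entities.get(satellite_id)) is not None:
        area_id = entry.get("area_id")
        device_id = entry.get("device_id")
    # 2) Only if the entity carried no area, fall back to the device's area.
    if area_id is None and device_id is not None and (dev := devices.get(device_id)) is not None:
        area_id = dev.get("area_id")
    return area_id, device_id


entities = {"satellite_kitchen": {"area_id": "kitchen", "device_id": "dev1"}}
print(get_area_and_device("satellite_kitchen", None, entities, {}))  # ('kitchen', 'dev1')
```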

@@ -25,7 +25,7 @@ from .agent_manager import (
     async_get_agent,
     get_agent_manager,
 )
-from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
+from .const import DATA_COMPONENT
 from .default_agent import (
     METADATA_CUSTOM_FILE,
     METADATA_CUSTOM_SENTENCE,
@@ -169,7 +169,8 @@ async def websocket_list_sentences(
     hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
 ) -> None:
     """List custom registered sentences."""
-    agent = hass.data[DATA_DEFAULT_ENTITY]
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
 
     sentences = []
     for trigger_data in agent.trigger_sentences:
@@ -191,7 +192,8 @@ async def websocket_hass_agent_debug(
     hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
 ) -> None:
     """Return intents that would be matched by the default agent for a list of sentences."""
-    agent = hass.data[DATA_DEFAULT_ENTITY]
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
 
     # Return results for each sentence in the same order as the input.
     result_dicts: list[dict[str, Any] | None] = []
@@ -201,6 +203,7 @@ async def websocket_hass_agent_debug(
             context=connection.context(msg),
             conversation_id=None,
             device_id=msg.get("device_id"),
+            satellite_id=None,
             language=msg.get("language", hass.config.language),
             agent_id=agent.entity_id,
         )

@@ -1,4 +1,9 @@
 {
+  "entity_component": {
+    "_": {
+      "default": "mdi:forum-outline"
+    }
+  },
   "services": {
     "process": {
       "service": "mdi:message-processing"

@@ -4,7 +4,7 @@
   "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
   "dependencies": ["http", "intent"],
   "documentation": "https://www.home-assistant.io/integrations/conversation",
-  "integration_type": "system",
+  "integration_type": "entity",
   "quality_scale": "internal",
   "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.3"]
 }

@@ -37,6 +37,9 @@ class ConversationInput:
     device_id: str | None
     """Unique identifier for the device."""
 
+    satellite_id: str | None
+    """Unique identifier for the satellite."""
+
     language: str
     """Language of the request."""
 
@@ -53,6 +56,7 @@ class ConversationInput:
             "context": self.context.as_dict(),
             "conversation_id": self.conversation_id,
             "device_id": self.device_id,
+            "satellite_id": self.satellite_id,
             "language": self.language,
             "agent_id": self.agent_id,
             "extra_system_prompt": self.extra_system_prompt,

@@ -15,12 +15,13 @@ import voluptuous as vol
 
 from homeassistant.const import CONF_COMMAND, CONF_PLATFORM
 from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant
-from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.script import ScriptRunResult
 from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
 from homeassistant.helpers.typing import UNDEFINED, ConfigType
 
-from .const import DATA_DEFAULT_ENTITY, DOMAIN
+from .agent_manager import get_agent_manager
+from .const import DOMAIN
 from .models import ConversationInput
 
 
@@ -70,6 +71,8 @@ async def async_attach_trigger(
     trigger_data = trigger_info["trigger_data"]
     sentences = config.get(CONF_COMMAND, [])
 
+    ent_reg = er.async_get(hass)
+
     job = HassJob(action)
 
     async def call_action(
@@ -91,6 +94,14 @@ async def async_attach_trigger(
             for entity_name, entity in result.entities.items()
         }
 
+        satellite_id = user_input.satellite_id
+        device_id = user_input.device_id
+        if (
+            satellite_id is not None
+            and (satellite_entry := ent_reg.async_get(satellite_id)) is not None
+        ):
+            device_id = satellite_entry.device_id
+
         trigger_input: dict[str, Any] = {  # Satisfy type checker
             **trigger_data,
             "platform": DOMAIN,
@@ -99,7 +110,8 @@ async def async_attach_trigger(
             "slots": {  # direct access to values
                 entity_name: entity["value"] for entity_name, entity in details.items()
             },
-            "device_id": user_input.device_id,
+            "device_id": device_id,
+            "satellite_id": satellite_id,
             "user_input": user_input.as_dict(),
         }
 
@@ -122,4 +134,6 @@ async def async_attach_trigger(
         # two trigger copies for who will provide a response.
         return None
 
-    return hass.data[DATA_DEFAULT_ENTITY].register_trigger(sentences, call_action)
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
+    return agent.register_trigger(sentences, call_action)

@@ -99,7 +99,7 @@ T = TypeVar(
 
 
 @dataclass(frozen=True, kw_only=True)
-class DeconzSensorDescription(Generic[T], SensorEntityDescription):
+class DeconzSensorDescription(SensorEntityDescription, Generic[T]):
     """Class describing deCONZ binary sensor entities."""
 
     instance_check: type[T] | None = None
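
The hunk above only reorders the base classes so the concrete description class precedes `Generic[T]`. A standalone sketch with stand-in classes (not the deCONZ or sensor-platform types), showing that a generic frozen dataclass declared in this order still parameterizes and instantiates cleanly:

```python
from dataclasses import dataclass
from typing import Generic, TypeVar

T = TypeVar("T")


# Stand-in for the real SensorEntityDescription base.
@dataclass(frozen=True, kw_only=True)
class SensorEntityDescription:
    key: str


# Concrete dataclass base first; Generic[T] contributes no fields and comes last.
@dataclass(frozen=True, kw_only=True)
class DeconzSensorDescription(SensorEntityDescription, Generic[T]):
    instance_check: type[T] | None = None


desc = DeconzSensorDescription[int](key="temperature", instance_check=int)
print(desc.key, desc.instance_check)  # temperature <class 'int'>
```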

@@ -19,6 +19,7 @@
     "ssdp",
     "stream",
     "sun",
+    "usage_prediction",
     "usb",
     "webhook",
     "zeroconf"

@@ -43,3 +43,5 @@ class DelugeSensorType(enum.StrEnum):
     UPLOAD_SPEED_SENSOR = "upload_speed"
     PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR = "protocol_traffic_upload_speed"
     PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR = "protocol_traffic_download_speed"
+    DOWNLOADING_COUNT_SENSOR = "downloading_count"
+    SEEDING_COUNT_SENSOR = "seeding_count"

@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from collections import Counter
 from datetime import timedelta
 from ssl import SSLError
 from typing import Any
@@ -14,11 +15,22 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
-from .const import LOGGER, DelugeGetSessionStatusKeys
+from .const import LOGGER, DelugeGetSessionStatusKeys, DelugeSensorType
 
 type DelugeConfigEntry = ConfigEntry[DelugeDataUpdateCoordinator]
 
 
+def count_states(data: dict[str, Any]) -> dict[str, int]:
+    """Count the states of the provided torrents."""
+
+    counts = Counter(torrent[b"state"].decode() for torrent in data.values())
+
+    return {
+        DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value: counts.get("Downloading", 0),
+        DelugeSensorType.SEEDING_COUNT_SENSOR.value: counts.get("Seeding", 0),
+    }
+
+
 class DelugeDataUpdateCoordinator(
     DataUpdateCoordinator[dict[Platform, dict[str, Any]]]
 ):
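
The new `count_states` helper tallies torrent states straight from Deluge's byte-keyed payload. A small usage sketch of the same `Counter` idiom, with made-up torrent hashes and states:

```python
from collections import Counter

# Torrent data in the shape Deluge returns: info keyed by torrent hash,
# with byte-string fields (the hashes and states below are illustrative).
torrents = {
    b"a1": {b"state": b"Downloading"},
    b"b2": {b"state": b"Seeding"},
    b"c3": {b"state": b"Seeding"},
}
counts = Counter(t[b"state"].decode() for t in torrents.values())
print(counts.get("Downloading", 0), counts.get("Seeding", 0))  # 1 2
```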
@@ -39,19 +51,22 @@ class DelugeDataUpdateCoordinator(
         )
         self.api = api
 
-    async def _async_update_data(self) -> dict[Platform, dict[str, Any]]:
-        """Get the latest data from Deluge and updates the state."""
+    def _get_deluge_data(self):
+        """Get the latest data from Deluge."""
 
         data = {}
         try:
-            _data = await self.hass.async_add_executor_job(
-                self.api.call,
+            data["session_status"] = self.api.call(
                 "core.get_session_status",
                 [iter_member.value for iter_member in list(DelugeGetSessionStatusKeys)],
             )
-            data[Platform.SENSOR] = {k.decode(): v for k, v in _data.items()}
-            data[Platform.SWITCH] = await self.hass.async_add_executor_job(
-                self.api.call, "core.get_torrents_status", {}, ["paused"]
+            data["torrents_status_state"] = self.api.call(
+                "core.get_torrents_status", {}, ["state"]
             )
+            data["torrents_status_paused"] = self.api.call(
+                "core.get_torrents_status", {}, ["paused"]
+            )
+
         except (
             ConnectionRefusedError,
             TimeoutError,
@@ -66,4 +81,18 @@ class DelugeDataUpdateCoordinator(
             ) from ex
             LOGGER.error("Unknown error connecting to Deluge: %s", ex)
             raise
+
+        return data
+
+    async def _async_update_data(self) -> dict[Platform, dict[str, Any]]:
+        """Get the latest data from Deluge and updates the state."""
+
+        deluge_data = await self.hass.async_add_executor_job(self._get_deluge_data)
+
+        data = {}
+        data[Platform.SENSOR] = {
+            k.decode(): v for k, v in deluge_data["session_status"].items()
+        }
+        data[Platform.SENSOR].update(count_states(deluge_data["torrents_status_state"]))
+        data[Platform.SWITCH] = deluge_data["torrents_status_paused"]
         return data
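
The refactor batches all blocking `api.call` requests into one synchronous helper, so the coordinator hops to the executor once per refresh instead of once per call. A generic asyncio sketch of that pattern, where `client` is a hypothetical blocking RPC client (not the Deluge API object):

```python
import asyncio


def _fetch_all(client) -> dict:
    # Runs in a worker thread, so blocking RPC calls are fine here.
    return {
        "session_status": client.call("core.get_session_status"),
        "torrents_state": client.call("core.get_torrents_status", {}, ["state"]),
        "torrents_paused": client.call("core.get_torrents_status", {}, ["paused"]),
    }


async def update(client) -> dict:
    # One hop to the executor for all three calls, instead of one hop each.
    return await asyncio.get_running_loop().run_in_executor(None, _fetch_all, client)
```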