Mirror of https://github.com/home-assistant/core.git (synced 2025-11-25 18:48:05 +00:00)

Compare commits: 2025.11.3 ... adguard/ad (774 commits)
[Commit list: 774 commits, from 653a126791 through 569dd2d6b7; the author, date, and commit-message columns of the table were not captured in the mirror.]
.github/workflows/builder.yml (vendored, 60 lines changed)

@@ -27,7 +27,7 @@ jobs:
       publish: ${{ steps.version.outputs.publish }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           fetch-depth: 0

@@ -90,7 +90,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'

@@ -162,18 +162,6 @@ jobs:
             sed -i "s|home-assistant-intents==.*||" requirements_all.txt
           fi

-      - name: Adjustments for armhf
-        if: matrix.arch == 'armhf'
-        run: |
-          # Pandas has issues building on armhf, it is expected they
-          # will drop the platform in the near future (they consider it
-          # "flimsy" on 386). The following packages depend on pandas,
-          # so we comment them out.
-          sed -i "s|env-canada|# env-canada|g" requirements_all.txt
-          sed -i "s|noaa-coops|# noaa-coops|g" requirements_all.txt
-          sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
-          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
-
       - name: Download translations
         uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
         with:

@@ -226,24 +214,16 @@ jobs:
          - odroid-c4
          - odroid-m1
          - odroid-n2
-         - odroid-xu
-         - qemuarm
          - qemuarm-64
-         - qemux86
          - qemux86-64
-         - raspberrypi
-         - raspberrypi2
-         - raspberrypi3
          - raspberrypi3-64
-         - raspberrypi4
          - raspberrypi4-64
          - raspberrypi5-64
-         - tinker
          - yellow
          - green
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Set build additional args
         run: |

@@ -281,7 +261,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Initialize git
         uses: home-assistant/actions/helpers/git-init@master

@@ -297,6 +277,7 @@ jobs:
           key-description: "Home Assistant Core"
           version: ${{ needs.init.outputs.version }}
           channel: ${{ needs.init.outputs.channel }}
+          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'

       - name: Update version file (stable -> beta)
         if: needs.init.outputs.channel == 'stable'

@@ -306,6 +287,7 @@ jobs:
           key-description: "Home Assistant Core"
           version: ${{ needs.init.outputs.version }}
           channel: beta
+          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'

   publish_container:
     name: Publish meta container for ${{ matrix.registry }}

@@ -323,7 +305,7 @@ jobs:
         registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Install Cosign
         uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

@@ -357,27 +339,12 @@ jobs:

           docker manifest create "${registry}/home-assistant:${tag_l}" \
             "${registry}/amd64-homeassistant:${tag_r}" \
-            "${registry}/i386-homeassistant:${tag_r}" \
-            "${registry}/armhf-homeassistant:${tag_r}" \
-            "${registry}/armv7-homeassistant:${tag_r}" \
             "${registry}/aarch64-homeassistant:${tag_r}"

           docker manifest annotate "${registry}/home-assistant:${tag_l}" \
             "${registry}/amd64-homeassistant:${tag_r}" \
             --os linux --arch amd64

-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/i386-homeassistant:${tag_r}" \
-            --os linux --arch 386
-
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/armhf-homeassistant:${tag_r}" \
-            --os linux --arch arm --variant=v6
-
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/armv7-homeassistant:${tag_r}" \
-            --os linux --arch arm --variant=v7
-
           docker manifest annotate "${registry}/home-assistant:${tag_l}" \
             "${registry}/aarch64-homeassistant:${tag_r}" \
             --os linux --arch arm64 --variant=v8

@@ -405,23 +372,14 @@ jobs:

           # Pull images from github container registry and verify signature
           docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
           docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

           validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
           validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

           if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
             # Upload images to dockerhub
             push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
             push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
           fi

@@ -456,7 +414,7 @@ jobs:
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

@@ -501,7 +459,7 @@ jobs:
       HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
.github/workflows/ci.yaml (vendored, 14 lines changed)

@@ -37,10 +37,10 @@ on:
         type: boolean

env:
-  CACHE_VERSION: 1
+  CACHE_VERSION: 2
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.11"
+  HA_SHORT_VERSION: "2025.12"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
   # 10.3 is the oldest supported version

@@ -99,7 +99,7 @@ jobs:
     steps:
       - &checkout
         name: Check out code from GitHub
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
       - name: Generate partial Python venv restore key
         id: generate_python_cache_key
         run: |

@@ -502,7 +502,6 @@ jobs:
             libavfilter-dev \
             libavformat-dev \
             libavutil-dev \
-            libgammu-dev \
             libswresample-dev \
             libswscale-dev \
             libudev-dev

@@ -623,7 +622,7 @@ jobs:
     steps:
       - *checkout
       - name: Dependency review
-        uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
+        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
         with:
           license-check: false # We use our own license audit checks

@@ -801,8 +800,7 @@ jobs:
             -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
-            libturbojpeg \
-            libgammu-dev
+            libturbojpeg
       - *checkout
       - *setup-python-default
       - *cache-restore-python-default

@@ -853,7 +851,6 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev \
             libxml2-utils
       - *checkout
       - *setup-python-matrix

@@ -1233,7 +1230,6 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev \
             libxml2-utils
       - *checkout
       - *setup-python-matrix
.github/workflows/codeql.yml (vendored, 6 lines changed)

@@ -21,14 +21,14 @@ jobs:

    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Initialize CodeQL
-        uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
+        uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
+        uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
        with:
          category: "/language:python"
.github/workflows/translations.yml (vendored, 2 lines changed)

@@ -19,7 +19,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
.github/workflows/wheels.yml (vendored, 42 lines changed)

@@ -33,7 +33,7 @@ jobs:
    steps:
      - &checkout
        name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python

@@ -77,20 +77,8 @@ jobs:

            # Use C-Extension for SQLAlchemy
            echo "REQUIRE_SQLALCHEMY_CEXT=1"
-
-            # Add additional pip wheel build constraints
-            echo "PIP_CONSTRAINT=build_constraints.txt"
          ) > .env_file

-      - name: Write pip wheel build constraints
-        run: |
-          (
-            # ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
-            # this caused the numpy builds to fail
-            # https://github.com/scikit-build/ninja-python-distributions/issues/274
-            echo "ninja==1.11.1.1"
-          ) > build_constraints.txt
-
      - name: Upload env_file
        uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:

@@ -99,13 +87,6 @@ jobs:
          include-hidden-files: true
          overwrite: true

-      - name: Upload build_constraints
-        uses: *actions-upload-artifact
-        with:
-          name: build_constraints
-          path: ./build_constraints.txt
-          overwrite: true
-
      - name: Upload requirements_diff
        uses: *actions-upload-artifact
        with:

@@ -138,13 +119,6 @@ jobs:
          - os: ubuntu-latest
          - arch: aarch64
            os: ubuntu-24.04-arm
-        exclude:
-          - abi: cp314
-            arch: armv7
-          - abi: cp314
-            arch: armhf
-          - abi: cp314
-            arch: i386
    steps:
      - *checkout

@@ -154,12 +128,6 @@ jobs:
        with:
          name: env_file

-      - &download-build-constraints
-        name: Download build_constraints
-        uses: *actions-download-artifact
-        with:
-          name: build_constraints
-
      - &download-requirements-diff
        name: Download requirements_diff
        uses: *actions-download-artifact

@@ -199,7 +167,7 @@ jobs:
      - *checkout

      - *download-env-file
-      - *download-build-constraints
      - *download-requirements-diff

      - name: Download requirements_all_wheels

@@ -209,10 +177,6 @@ jobs:

      - name: Adjust build env
        run: |
-          if [ "${{ matrix.arch }}" = "i386" ]; then
-            echo "NPY_DISABLE_SVML=1" >> .env_file
-          fi
-
          # Do not pin numpy in wheels building
          sed -i "/numpy/d" homeassistant/package_constraints.txt
          # Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine

@@ -228,7 +192,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
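The removed steps above relied on pip's PIP_CONSTRAINT environment variable, the environment form of --constraint, which in recent pip versions is also honored when pip installs build dependencies inside an isolated build environment; that is what let the workflow pin ninja for wheel builds. A minimal Python sketch of that mechanism, assuming pip is on PATH; the target package and the pin are taken from the removed step but the invocation itself is illustrative:

```python
# Sketch of the (now removed) build-constraints mechanism.
# PIP_CONSTRAINT is pip's standard environment form of --constraint and, in
# recent pip, also constrains packages installed into isolated build envs.
import os
import pathlib
import subprocess

constraints = pathlib.Path("build_constraints.txt")
constraints.write_text("ninja==1.11.1.1\n")  # the pin the removed step wrote

env = os.environ | {"PIP_CONSTRAINT": str(constraints)}
# Illustrative target: any sdist whose build backend pulls in ninja would do.
subprocess.run(
    ["pip", "wheel", "numpy", "--no-deps", "-w", "wheels/"],
    env=env,
    check=True,
)
```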
.pre-commit-config.yaml

@@ -87,7 +87,7 @@ repos:
        pass_filenames: false
        language: script
        types: [text]
-        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
+        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(conditions|quality_scale|services|triggers)\.yaml|homeassistant/brands/.*\.json|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
      - id: hassfest-metadata
        name: hassfest-metadata
        entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
.strict-typing

@@ -107,6 +107,7 @@ homeassistant.components.automation.*
 homeassistant.components.awair.*
 homeassistant.components.axis.*
 homeassistant.components.azure_storage.*
+homeassistant.components.backblaze_b2.*
 homeassistant.components.backup.*
 homeassistant.components.baf.*
 homeassistant.components.bang_olufsen.*

@@ -230,6 +231,7 @@ homeassistant.components.google_cloud.*
 homeassistant.components.google_drive.*
 homeassistant.components.google_photos.*
 homeassistant.components.google_sheets.*
+homeassistant.components.google_weather.*
 homeassistant.components.govee_ble.*
 homeassistant.components.gpsd.*
 homeassistant.components.greeneye_monitor.*

@@ -395,7 +397,6 @@ homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
 homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
-homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
 homeassistant.components.paperless_ngx.*
 homeassistant.components.peblar.*

@@ -578,6 +579,7 @@ homeassistant.components.wiz.*
 homeassistant.components.wled.*
 homeassistant.components.workday.*
 homeassistant.components.worldclock.*
+homeassistant.components.xbox.*
 homeassistant.components.xiaomi_ble.*
 homeassistant.components.yale_smart_alarm.*
 homeassistant.components.yalexs_ble.*
CODEOWNERS (generated, 32 lines changed)

@@ -69,6 +69,8 @@ build.json @home-assistant/supervisor
 /tests/components/airly/ @bieniu
 /homeassistant/components/airnow/ @asymworks
 /tests/components/airnow/ @asymworks
+/homeassistant/components/airobot/ @mettolen
+/tests/components/airobot/ @mettolen
 /homeassistant/components/airos/ @CoMPaTech
 /tests/components/airos/ @CoMPaTech
 /homeassistant/components/airq/ @Sibgatulin @dl2080

@@ -196,6 +198,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/azure_service_bus/ @hfurubotten
 /homeassistant/components/azure_storage/ @zweckj
 /tests/components/azure_storage/ @zweckj
+/homeassistant/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
+/tests/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
 /homeassistant/components/backup/ @home-assistant/core
 /tests/components/backup/ @home-assistant/core
 /homeassistant/components/baf/ @bdraco @jfroy

@@ -316,8 +320,6 @@ build.json @home-assistant/supervisor
 /tests/components/cpuspeed/ @fabaff
 /homeassistant/components/crownstone/ @Crownstone @RicArch97
 /tests/components/crownstone/ @Crownstone @RicArch97
-/homeassistant/components/cups/ @fabaff
-/tests/components/cups/ @fabaff
 /homeassistant/components/cync/ @Kinachi249
 /tests/components/cync/ @Kinachi249
 /homeassistant/components/daikin/ @fredrike

@@ -510,8 +512,6 @@ build.json @home-assistant/supervisor
 /tests/components/fjaraskupan/ @elupus
 /homeassistant/components/flexit_bacnet/ @lellky @piotrbulinski
 /tests/components/flexit_bacnet/ @lellky @piotrbulinski
-/homeassistant/components/flick_electric/ @ZephireNZ
-/tests/components/flick_electric/ @ZephireNZ
 /homeassistant/components/flipr/ @cnico
 /tests/components/flipr/ @cnico
 /homeassistant/components/flo/ @dmulcahey

@@ -609,6 +609,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_tasks/ @allenporter
 /homeassistant/components/google_travel_time/ @eifinger
 /tests/components/google_travel_time/ @eifinger
+/homeassistant/components/google_weather/ @tronikos
+/tests/components/google_weather/ @tronikos
 /homeassistant/components/govee_ble/ @bdraco
 /tests/components/govee_ble/ @bdraco
 /homeassistant/components/govee_light_local/ @Galorhallen

@@ -627,6 +629,8 @@ build.json @home-assistant/supervisor
 /tests/components/guardian/ @bachya
 /homeassistant/components/habitica/ @tr4nt0r
 /tests/components/habitica/ @tr4nt0r
+/homeassistant/components/hanna/ @bestycame
+/tests/components/hanna/ @bestycame
 /homeassistant/components/hardkernel/ @home-assistant/core
 /tests/components/hardkernel/ @home-assistant/core
 /homeassistant/components/hardware/ @home-assistant/core

@@ -846,6 +850,8 @@ build.json @home-assistant/supervisor
 /tests/components/kraken/ @eifinger
 /homeassistant/components/kulersky/ @emlove
 /tests/components/kulersky/ @emlove
+/homeassistant/components/labs/ @home-assistant/core
+/tests/components/labs/ @home-assistant/core
 /homeassistant/components/lacrosse_view/ @IceBotYT
 /tests/components/lacrosse_view/ @IceBotYT
 /homeassistant/components/lamarzocco/ @zweckj

@@ -1019,8 +1025,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/msteams/ @peroyvind
 /homeassistant/components/mullvad/ @meichthys
 /tests/components/mullvad/ @meichthys
-/homeassistant/components/music_assistant/ @music-assistant
-/tests/components/music_assistant/ @music-assistant
+/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
+/tests/components/music_assistant/ @music-assistant @arturpragacz
 /homeassistant/components/mutesync/ @currentoor
 /tests/components/mutesync/ @currentoor
 /homeassistant/components/my/ @home-assistant/core

@@ -1376,6 +1382,8 @@ build.json @home-assistant/supervisor
 /tests/components/sanix/ @tomaszsluszniak
 /homeassistant/components/satel_integra/ @Tommatheussen
 /tests/components/satel_integra/ @Tommatheussen
+/homeassistant/components/saunum/ @mettolen
+/tests/components/saunum/ @mettolen
 /homeassistant/components/scene/ @home-assistant/core
 /tests/components/scene/ @home-assistant/core
 /homeassistant/components/schedule/ @home-assistant/core

@@ -1479,8 +1487,6 @@ build.json @home-assistant/supervisor
 /tests/components/smhi/ @gjohansson-ST
 /homeassistant/components/smlight/ @tl-sl
 /tests/components/smlight/ @tl-sl
-/homeassistant/components/sms/ @ocalvo
-/tests/components/sms/ @ocalvo
 /homeassistant/components/snapcast/ @luar123
 /tests/components/snapcast/ @luar123
 /homeassistant/components/snmp/ @nmaggioni

@@ -1721,8 +1727,8 @@ build.json @home-assistant/supervisor
 /tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
 /homeassistant/components/valve/ @home-assistant/core
 /tests/components/valve/ @home-assistant/core
-/homeassistant/components/vegehub/ @ghowevege
-/tests/components/vegehub/ @ghowevege
+/homeassistant/components/vegehub/ @thulrus
+/tests/components/vegehub/ @thulrus
 /homeassistant/components/velbus/ @Cereal2nd @brefra
 /tests/components/velbus/ @Cereal2nd @brefra
 /homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew

@@ -1736,6 +1742,8 @@ build.json @home-assistant/supervisor
 /tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
 /homeassistant/components/vicare/ @CFenner
 /tests/components/vicare/ @CFenner
+/homeassistant/components/victron_ble/ @rajlaud
+/tests/components/victron_ble/ @rajlaud
 /homeassistant/components/victron_remote_monitoring/ @AndyTempel
 /tests/components/victron_remote_monitoring/ @AndyTempel
 /homeassistant/components/vilfo/ @ManneW

@@ -1821,8 +1829,8 @@ build.json @home-assistant/supervisor
 /tests/components/ws66i/ @ssaenger
 /homeassistant/components/wyoming/ @synesthesiam
 /tests/components/wyoming/ @synesthesiam
-/homeassistant/components/xbox/ @hunterjm
-/tests/components/xbox/ @hunterjm
+/homeassistant/components/xbox/ @hunterjm @tr4nt0r
+/tests/components/xbox/ @hunterjm @tr4nt0r
 /homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
 /tests/components/xiaomi_aqara/ @danielhiversen @syssi
 /homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
Dockerfile (generated, 4 lines changed)

@@ -21,11 +21,9 @@ ARG BUILD_ARCH
 RUN \
     case "${BUILD_ARCH}" in \
         "aarch64") go2rtc_suffix='arm64' ;; \
-        "armhf") go2rtc_suffix='armv6' ;; \
-        "armv7") go2rtc_suffix='arm' ;; \
         *) go2rtc_suffix=${BUILD_ARCH} ;; \
     esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
     && chmod +x /bin/go2rtc \
     # Verify go2rtc can be executed
     && go2rtc --version
Dockerfile.dev

@@ -13,7 +13,6 @@ RUN \
     libavcodec-dev \
     libavdevice-dev \
     libavutil-dev \
-    libgammu-dev \
     libswscale-dev \
     libswresample-dev \
     libavfilter-dev \
build.yaml

@@ -1,10 +1,7 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
 cosign:
   base_identity: https://github.com/home-assistant/docker/.*
   identity: https://github.com/home-assistant/core/.*
homeassistant/auth/mfa_modules/notify.py

@@ -6,7 +6,6 @@ Sending HOTP through notify service
 from __future__ import annotations

 import asyncio
-from collections import OrderedDict
 import logging
 from typing import Any, cast

@@ -304,14 +303,15 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
         if not self._available_notify_services:
             return self.async_abort(reason="no_available_service")

-        schema: dict[str, Any] = OrderedDict()
-        schema["notify_service"] = vol.In(self._available_notify_services)
-        schema["target"] = vol.Optional(str)
-
-        return self.async_show_form(
-            step_id="init", data_schema=vol.Schema(schema), errors=errors
+        schema = vol.Schema(
+            {
+                vol.Required("notify_service"): vol.In(self._available_notify_services),
+                vol.Optional("target"): str,
+            }
         )
+
+        return self.async_show_form(step_id="init", data_schema=schema, errors=errors)

     async def async_step_setup(
         self, user_input: dict[str, str] | None = None
     ) -> FlowResult:
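For reference, the vol.Schema form adopted above validates a plain dict in one declaration instead of building the schema key by key. A standalone sketch of the same pattern; the service names are placeholders, not values from the integration:

```python
# Standalone sketch of the voluptuous schema pattern used in the new code.
import voluptuous as vol

available = ["notify.mobile_app", "notify.email"]  # placeholder service names

schema = vol.Schema(
    {
        vol.Required("notify_service"): vol.In(available),
        vol.Optional("target"): str,
    }
)

print(schema({"notify_service": "notify.email"}))  # valid; "target" optional
try:
    schema({"notify_service": "notify.sms"})  # rejected: not in the allowed list
except vol.Invalid as err:
    print(f"rejected: {err}")
```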
@@ -179,12 +179,18 @@ class Data:
|
|||||||
user_hash = base64.b64decode(found["password"])
|
user_hash = base64.b64decode(found["password"])
|
||||||
|
|
||||||
# bcrypt.checkpw is timing-safe
|
# bcrypt.checkpw is timing-safe
|
||||||
if not bcrypt.checkpw(password.encode(), user_hash):
|
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
|
||||||
|
# Previously the password was silently truncated.
|
||||||
|
# https://github.com/pyca/bcrypt/pull/1000
|
||||||
|
if not bcrypt.checkpw(password.encode()[:72], user_hash):
|
||||||
raise InvalidAuth
|
raise InvalidAuth
|
||||||
|
|
||||||
def hash_password(self, password: str, for_storage: bool = False) -> bytes:
|
def hash_password(self, password: str, for_storage: bool = False) -> bytes:
|
||||||
"""Encode a password."""
|
"""Encode a password."""
|
||||||
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
|
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
|
||||||
|
# Previously the password was silently truncated.
|
||||||
|
# https://github.com/pyca/bcrypt/pull/1000
|
||||||
|
hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
|
||||||
|
|
||||||
if for_storage:
|
if for_storage:
|
||||||
hashed = base64.b64encode(hashed)
|
hashed = base64.b64encode(hashed)
|
||||||
|
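For context, a minimal standalone sketch (plain `bcrypt` package, not Home Assistant code) of the behavior the `[:72]` slices above guard against: bcrypt only uses the first 72 bytes of its input, and bcrypt 5.0 raises `ValueError` for longer inputs instead of silently truncating.

```python
import bcrypt

password = ("x" * 80).encode()  # 80 bytes, over bcrypt's 72-byte limit

# Hash with an explicit slice, mirroring the change above.
hashed = bcrypt.hashpw(password[:72], bcrypt.gensalt(rounds=12))

# Verification must slice the same way; any password sharing the same first
# 72 bytes verifies against the same hash.
assert bcrypt.checkpw(password[:72], hashed)
```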
@@ -176,6 +176,8 @@ FRONTEND_INTEGRATIONS = {
 STAGE_0_INTEGRATIONS = (
     # Load logging and http deps as soon as possible
     ("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
+    # Setup labs for preview features
+    ("labs", {"labs"}, STAGE_0_SUBSTAGE_TIMEOUT),
     # Setup frontend
     ("frontend", FRONTEND_INTEGRATIONS, None),
     # Setup recorder
@@ -212,6 +214,7 @@ DEFAULT_INTEGRATIONS = {
     "backup",
     "frontend",
     "hardware",
+    "labs",
     "logger",
     "network",
     "system_health",
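A rough sketch of how a staged table like STAGE_0_INTEGRATIONS is consumed — illustrative only, not the actual bootstrap internals: each row is a (label, domains, timeout) tuple processed in order, with the timeout applied per substage when present.

```python
import asyncio
from collections.abc import Awaitable, Callable, Iterable


async def run_stage_0(
    rows: Iterable[tuple[str, set[str], float | None]],
    setup_domains: Callable[[set[str]], Awaitable[None]],
) -> None:
    for _label, domains, timeout in rows:
        if timeout is None:
            await setup_domains(domains)
        else:
            # Substages like the new ("labs", {"labs"}, ...) row run under a timeout.
            async with asyncio.timeout(timeout):
                await setup_domains(domains)
```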
@@ -15,6 +15,7 @@
     "google_tasks",
     "google_translate",
     "google_travel_time",
+    "google_weather",
     "google_wifi",
     "google",
     "nest",
homeassistant/brands/victron.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "victron",
+  "name": "Victron",
+  "integrations": ["victron_ble", "victron_remote_monitoring"]
+}
@@ -1,11 +1,5 @@
 {
   "domain": "yale",
-  "name": "Yale",
-  "integrations": [
-    "august",
-    "yale_smart_alarm",
-    "yalexs_ble",
-    "yale_home",
-    "yale"
-  ]
+  "name": "Yale (non-US/Canada)",
+  "integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
 }
homeassistant/brands/yale_august.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "domain": "yale_august",
+  "name": "Yale August (US/Canada)",
+  "integrations": ["august", "august_ble"]
+}
@@ -1,10 +1,10 @@
 """The Actron Air integration."""

 from actron_neo_api import (
-    ActronAirNeoACSystem,
-    ActronNeoAPI,
-    ActronNeoAPIError,
-    ActronNeoAuthError,
+    ActronAirACSystem,
+    ActronAirAPI,
+    ActronAirAPIError,
+    ActronAirAuthError,
 )

 from homeassistant.const import CONF_API_TOKEN, Platform
@@ -23,16 +23,16 @@ PLATFORM = [Platform.CLIMATE]
 async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
     """Set up Actron Air integration from a config entry."""

-    api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN])
-    systems: list[ActronAirNeoACSystem] = []
+    api = ActronAirAPI(refresh_token=entry.data[CONF_API_TOKEN])
+    systems: list[ActronAirACSystem] = []

     try:
         systems = await api.get_ac_systems()
         await api.update_status()
-    except ActronNeoAuthError:
+    except ActronAirAuthError:
         _LOGGER.error("Authentication error while setting up Actron Air integration")
         raise
-    except ActronNeoAPIError as err:
+    except ActronAirAPIError as err:
         _LOGGER.error("API error while setting up Actron Air integration: %s", err)
         raise

@@ -2,7 +2,7 @@

 from typing import Any

-from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone
+from actron_neo_api import ActronAirStatus, ActronAirZone

 from homeassistant.components.climate import (
     FAN_AUTO,
@@ -132,7 +132,7 @@ class ActronSystemClimate(BaseClimateEntity):
         return self._status.max_temp

     @property
-    def _status(self) -> ActronAirNeoStatus:
+    def _status(self) -> ActronAirStatus:
         """Get the current status from the coordinator."""
         return self.coordinator.data

@@ -194,7 +194,7 @@ class ActronZoneClimate(BaseClimateEntity):
     def __init__(
         self,
         coordinator: ActronAirSystemCoordinator,
-        zone: ActronAirNeoZone,
+        zone: ActronAirZone,
     ) -> None:
         """Initialize an Actron Air unit."""
         super().__init__(coordinator, zone.title)
@@ -221,7 +221,7 @@ class ActronZoneClimate(BaseClimateEntity):
         return self._zone.max_temp

     @property
-    def _zone(self) -> ActronAirNeoZone:
+    def _zone(self) -> ActronAirZone:
         """Get the current zone data from the coordinator."""
         status = self.coordinator.data
         return status.zones[self._zone_id]
@@ -3,7 +3,7 @@
 import asyncio
 from typing import Any

-from actron_neo_api import ActronNeoAPI, ActronNeoAuthError
+from actron_neo_api import ActronAirAPI, ActronAirAuthError

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_API_TOKEN
@@ -17,7 +17,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):

     def __init__(self) -> None:
         """Initialize the config flow."""
-        self._api: ActronNeoAPI | None = None
+        self._api: ActronAirAPI | None = None
         self._device_code: str | None = None
         self._user_code: str = ""
         self._verification_uri: str = ""
@@ -30,10 +30,10 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
         """Handle the initial step."""
         if self._api is None:
             _LOGGER.debug("Initiating device authorization")
-            self._api = ActronNeoAPI()
+            self._api = ActronAirAPI()
             try:
                 device_code_response = await self._api.request_device_code()
-            except ActronNeoAuthError as err:
+            except ActronAirAuthError as err:
                 _LOGGER.error("OAuth2 flow failed: %s", err)
                 return self.async_abort(reason="oauth2_error")

@@ -50,7 +50,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
         try:
             await self._api.poll_for_token(self._device_code)
             _LOGGER.debug("Authorization successful")
-        except ActronNeoAuthError as ex:
+        except ActronAirAuthError as ex:
             _LOGGER.exception("Error while waiting for device authorization")
             raise CannotConnect from ex

@@ -89,7 +89,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):

         try:
             user_data = await self._api.get_user_info()
-        except ActronNeoAuthError as err:
+        except ActronAirAuthError as err:
             _LOGGER.error("Error getting user info: %s", err)
             return self.async_abort(reason="oauth2_error")

@@ -5,7 +5,7 @@ from __future__ import annotations
 from dataclasses import dataclass
 from datetime import timedelta

-from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI
+from actron_neo_api import ActronAirACSystem, ActronAirAPI, ActronAirStatus

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
@@ -23,7 +23,7 @@ ERROR_UNKNOWN = "unknown_error"
 class ActronAirRuntimeData:
     """Runtime data for the Actron Air integration."""

-    api: ActronNeoAPI
+    api: ActronAirAPI
     system_coordinators: dict[str, ActronAirSystemCoordinator]


@@ -33,15 +33,15 @@ AUTH_ERROR_THRESHOLD = 3
 SCAN_INTERVAL = timedelta(seconds=30)


-class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
+class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
     """System coordinator for Actron Air integration."""

     def __init__(
         self,
         hass: HomeAssistant,
         entry: ActronAirConfigEntry,
-        api: ActronNeoAPI,
-        system: ActronAirNeoACSystem,
+        api: ActronAirAPI,
+        system: ActronAirACSystem,
     ) -> None:
         """Initialize the coordinator."""
         super().__init__(
@@ -57,7 +57,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
         self.status = self.api.state_manager.get_status(self.serial_number)
         self.last_seen = dt_util.utcnow()

-    async def _async_update_data(self) -> ActronAirNeoStatus:
+    async def _async_update_data(self) -> ActronAirStatus:
         """Fetch updates and merge incremental changes into the full state."""
         await self.api.update_status()
         self.status = self.api.state_manager.get_status(self.serial_number)
@@ -12,5 +12,5 @@
   "documentation": "https://www.home-assistant.io/integrations/actron_air",
   "iot_class": "cloud_polling",
   "quality_scale": "bronze",
-  "requirements": ["actron-neo-api==0.1.84"]
+  "requirements": ["actron-neo-api==0.1.87"]
 }
@@ -17,6 +17,11 @@ from homeassistant.const import (
     CONF_UNIQUE_ID,
 )
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.selector import (
+    TextSelector,
+    TextSelectorConfig,
+    TextSelectorType,
+)

 from .const import (
     ACCOUNT_ID,
@@ -66,7 +71,15 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle the local step."""
         data_schema = vol.Schema(
-            {vol.Required(WIFI_SSID): str, vol.Required(WIFI_PSWD): str}
+            {
+                vol.Required(WIFI_SSID): str,
+                vol.Required(WIFI_PSWD): TextSelector(
+                    TextSelectorConfig(
+                        type=TextSelectorType.PASSWORD,
+                        autocomplete="current-password",
+                    ),
+                ),
+            }
         )
         if user_input is None:
             return self.async_show_form(
@@ -2,14 +2,16 @@

 from __future__ import annotations

+from dataclasses import dataclass
 from typing import cast

 from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
+    SensorEntityDescription,
     SensorStateClass,
 )
-from homeassistant.const import UnitOfEnergy
+from homeassistant.const import UnitOfEnergy, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -20,44 +22,74 @@ from .const import CONNECTION_TYPE, DOMAIN, LOCAL
 from .coordinator import AdaxCloudCoordinator


+@dataclass(kw_only=True, frozen=True)
+class AdaxSensorDescription(SensorEntityDescription):
+    """Describes Adax sensor entity."""
+
+    data_key: str
+
+
+SENSORS: tuple[AdaxSensorDescription, ...] = (
+    AdaxSensorDescription(
+        key="temperature",
+        data_key="temperature",
+        device_class=SensorDeviceClass.TEMPERATURE,
+        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+        state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
+    ),
+    AdaxSensorDescription(
+        key="energy",
+        data_key="energyWh",
+        device_class=SensorDeviceClass.ENERGY,
+        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
+        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
+        state_class=SensorStateClass.TOTAL_INCREASING,
+        suggested_display_precision=3,
+    ),
+)
+
+
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: AdaxConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
-    """Set up the Adax energy sensors with config flow."""
+    """Set up the Adax sensors with config flow."""
     if entry.data.get(CONNECTION_TYPE) != LOCAL:
         cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

         # Create individual energy sensors for each device
         async_add_entities(
-            AdaxEnergySensor(cloud_coordinator, device_id)
-            for device_id in cloud_coordinator.data
+            [
+                AdaxSensor(cloud_coordinator, entity_description, device_id)
+                for device_id in cloud_coordinator.data
+                for entity_description in SENSORS
+            ]
         )


-class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
-    """Representation of an Adax energy sensor."""
+class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
+    """Representation of an Adax sensor."""

+    entity_description: AdaxSensorDescription
     _attr_has_entity_name = True
-    _attr_translation_key = "energy"
-    _attr_device_class = SensorDeviceClass.ENERGY
-    _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
-    _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
-    _attr_state_class = SensorStateClass.TOTAL_INCREASING
-    _attr_suggested_display_precision = 3

     def __init__(
         self,
         coordinator: AdaxCloudCoordinator,
+        entity_description: AdaxSensorDescription,
         device_id: str,
     ) -> None:
-        """Initialize the energy sensor."""
+        """Initialize the sensor."""
         super().__init__(coordinator)
+        self.entity_description = entity_description
         self._device_id = device_id
         room = coordinator.data[device_id]

-        self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
+        self._attr_unique_id = (
+            f"{room['homeId']}_{device_id}_{self.entity_description.key}"
+        )
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, device_id)},
             name=room["name"],
@@ -68,10 +100,14 @@ class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
     def available(self) -> bool:
         """Return True if entity is available."""
         return (
-            super().available and "energyWh" in self.coordinator.data[self._device_id]
+            super().available
+            and self.entity_description.data_key
+            in self.coordinator.data[self._device_id]
         )

     @property
-    def native_value(self) -> int:
+    def native_value(self) -> int | float | None:
         """Return the native value of the sensor."""
-        return int(self.coordinator.data[self._device_id]["energyWh"])
+        return self.coordinator.data[self._device_id].get(
+            self.entity_description.data_key
+        )
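The sensor refactor above is an instance of the entity-description pattern: per-sensor metadata moves into frozen dataclasses, and a single entity class looks values up via `data_key`. A framework-free sketch of the same idea:

```python
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class Description:
    key: str
    data_key: str


SENSORS = (
    Description(key="temperature", data_key="temperature"),
    Description(key="energy", data_key="energyWh"),
)

# Sample coordinator payload for one device.
room = {"temperature": 21.5, "energyWh": 1234}

# One generic "entity" per description reads its own value.
values = {d.key: room.get(d.data_key) for d in SENSORS}
assert values == {"temperature": 21.5, "energy": 1234}
```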
@@ -45,7 +45,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
     {vol.Optional(CONF_FORCE, default=False): cv.boolean}
 )

-PLATFORMS = [Platform.SENSOR, Platform.SWITCH]
+PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
 type AdGuardConfigEntry = ConfigEntry[AdGuardData]


@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "local_polling",
   "loggers": ["adguardhome"],
-  "requirements": ["adguardhome==0.7.0"]
+  "requirements": ["adguardhome==0.8.1"]
 }
homeassistant/components/adguard/update.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+"""AdGuard Home Update platform."""
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import Any
+
+from adguardhome import AdGuardHomeError
+
+from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import AdGuardConfigEntry, AdGuardData
+from .const import DOMAIN
+from .entity import AdGuardHomeEntity
+
+SCAN_INTERVAL = timedelta(seconds=300)
+PARALLEL_UPDATES = 1
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AdGuardConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up AdGuard Home update entity based on a config entry."""
+    data = entry.runtime_data
+
+    if (await data.client.update.update_available()).disabled:
+        return
+
+    async_add_entities([AdGuardHomeUpdate(data, entry)], True)
+
+
+class AdGuardHomeUpdate(AdGuardHomeEntity, UpdateEntity):
+    """Defines an AdGuard Home update."""
+
+    _attr_supported_features = UpdateEntityFeature.INSTALL
+    _attr_name = None
+
+    def __init__(
+        self,
+        data: AdGuardData,
+        entry: AdGuardConfigEntry,
+    ) -> None:
+        """Initialize AdGuard Home update."""
+        super().__init__(data, entry)
+
+        self._attr_unique_id = "_".join(
+            [DOMAIN, self.adguard.host, str(self.adguard.port), "update"]
+        )
+
+    async def _adguard_update(self) -> None:
+        """Update AdGuard Home entity."""
+        value = await self.adguard.update.update_available()
+        self._attr_installed_version = self.data.version
+        self._attr_latest_version = value.new_version
+        self._attr_release_summary = value.announcement
+        self._attr_release_url = value.announcement_url
+
+    async def async_install(
+        self, version: str | None, backup: bool, **kwargs: Any
+    ) -> None:
+        """Install latest update."""
+        try:
+            await self.adguard.update.begin_update()
+        except AdGuardHomeError as err:
+            raise HomeAssistantError(f"Failed to install update: {err}") from err
+        self.hass.config_entries.async_schedule_reload(self._entry.entry_id)
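A hedged sketch of how the new platform drives the adguardhome 0.8.1 update API, using only the calls visible in the file above (`update_available()`, `begin_update()`, and the `disabled`/`new_version`/`announcement` fields):

```python
async def check_and_install(adguard) -> None:
    """Check for an AdGuard Home update and start it if one is available."""
    info = await adguard.update.update_available()
    if info.disabled:
        # Server-side updates are disabled; the platform skips entity creation.
        return
    if info.new_version:
        print(f"Update available: {info.new_version} - {info.announcement}")
        await adguard.update.begin_update()
```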
@@ -30,6 +30,7 @@ generate_data:
       media:
         accept:
           - "*"
+        multiple: true
 generate_image:
   fields:
     task_name:
@@ -57,3 +58,4 @@ generate_image:
       media:
         accept:
           - "*"
+        multiple: true
homeassistant/components/airobot/__init__.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+"""The Airobot integration."""
+
+from __future__ import annotations
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+
+from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
+
+PLATFORMS: list[Platform] = [Platform.CLIMATE]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
+    """Set up Airobot from a config entry."""
+    coordinator = AirobotDataUpdateCoordinator(hass, entry)
+
+    # Fetch initial data so we have data when entities subscribe
+    await coordinator.async_config_entry_first_refresh()
+
+    entry.runtime_data = coordinator
+
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
+    """Unload a config entry."""
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/airobot/climate.py (new file, 151 lines)
@@ -0,0 +1,151 @@
+"""Climate platform for Airobot thermostat."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from pyairobotrest.const import (
+    MODE_AWAY,
+    MODE_HOME,
+    SETPOINT_TEMP_MAX,
+    SETPOINT_TEMP_MIN,
+)
+from pyairobotrest.exceptions import AirobotError
+from pyairobotrest.models import ThermostatSettings, ThermostatStatus
+
+from homeassistant.components.climate import (
+    PRESET_AWAY,
+    PRESET_BOOST,
+    PRESET_HOME,
+    ClimateEntity,
+    ClimateEntityFeature,
+    HVACAction,
+    HVACMode,
+)
+from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ServiceValidationError
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from . import AirobotConfigEntry
+from .const import DOMAIN
+from .entity import AirobotEntity
+
+PARALLEL_UPDATES = 1
+
+_PRESET_MODE_2_MODE = {
+    PRESET_AWAY: MODE_AWAY,
+    PRESET_HOME: MODE_HOME,
+}
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AirobotConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up Airobot climate platform."""
+    coordinator = entry.runtime_data
+    async_add_entities([AirobotClimate(coordinator)])
+
+
+class AirobotClimate(AirobotEntity, ClimateEntity):
+    """Representation of an Airobot thermostat."""
+
+    _attr_name = None
+    _attr_translation_key = "thermostat"
+    _attr_temperature_unit = UnitOfTemperature.CELSIUS
+    _attr_hvac_modes = [HVACMode.HEAT]
+    _attr_preset_modes = [PRESET_HOME, PRESET_AWAY, PRESET_BOOST]
+    _attr_supported_features = (
+        ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
+    )
+    _attr_min_temp = SETPOINT_TEMP_MIN
+    _attr_max_temp = SETPOINT_TEMP_MAX
+
+    @property
+    def _status(self) -> ThermostatStatus:
+        """Get status from coordinator data."""
+        return self.coordinator.data.status
+
+    @property
+    def _settings(self) -> ThermostatSettings:
+        """Get settings from coordinator data."""
+        return self.coordinator.data.settings
+
+    @property
+    def current_temperature(self) -> float | None:
+        """Return the current temperature."""
+        return self._status.temp_air
+
+    @property
+    def target_temperature(self) -> float | None:
+        """Return the target temperature."""
+        if self._settings.is_home_mode:
+            return self._settings.setpoint_temp
+        return self._settings.setpoint_temp_away
+
+    @property
+    def hvac_mode(self) -> HVACMode:
+        """Return current HVAC mode."""
+        if self._status.is_heating:
+            return HVACMode.HEAT
+        return HVACMode.OFF
+
+    @property
+    def hvac_action(self) -> HVACAction:
+        """Return current HVAC action."""
+        if self._status.is_heating:
+            return HVACAction.HEATING
+        return HVACAction.IDLE
+
+    @property
+    def preset_mode(self) -> str | None:
+        """Return current preset mode."""
+        if self._settings.setting_flags.boost_enabled:
+            return PRESET_BOOST
+        if self._settings.is_home_mode:
+            return PRESET_HOME
+        return PRESET_AWAY
+
+    async def async_set_temperature(self, **kwargs: Any) -> None:
+        """Set new target temperature."""
+        temperature = kwargs[ATTR_TEMPERATURE]
+
+        try:
+            if self._settings.is_home_mode:
+                await self.coordinator.client.set_home_temperature(float(temperature))
+            else:
+                await self.coordinator.client.set_away_temperature(float(temperature))
+        except AirobotError as err:
+            raise ServiceValidationError(
+                translation_domain=DOMAIN,
+                translation_key="set_temperature_failed",
+                translation_placeholders={"temperature": str(temperature)},
+            ) from err
+
+        await self.coordinator.async_request_refresh()
+
+    async def async_set_preset_mode(self, preset_mode: str) -> None:
+        """Set new preset mode."""
+        try:
+            if preset_mode == PRESET_BOOST:
+                # Enable boost mode
+                if not self._settings.setting_flags.boost_enabled:
+                    await self.coordinator.client.set_boost_mode(True)
+            else:
+                # Disable boost mode if it's enabled
+                if self._settings.setting_flags.boost_enabled:
+                    await self.coordinator.client.set_boost_mode(False)
+
+                # Set the mode (HOME or AWAY)
+                await self.coordinator.client.set_mode(_PRESET_MODE_2_MODE[preset_mode])
+
+        except AirobotError as err:
+            raise ServiceValidationError(
+                translation_domain=DOMAIN,
+                translation_key="set_preset_mode_failed",
+                translation_placeholders={"preset_mode": preset_mode},
+            ) from err
+
+        await self.coordinator.async_request_refresh()
homeassistant/components/airobot/config_flow.py (new file, 183 lines)
@@ -0,0 +1,183 @@
+"""Config flow for the Airobot integration."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+import logging
+from typing import Any
+
+from pyairobotrest import AirobotClient
+from pyairobotrest.exceptions import (
+    AirobotAuthError,
+    AirobotConnectionError,
+    AirobotError,
+    AirobotTimeoutError,
+)
+import voluptuous as vol
+
+from homeassistant.config_entries import ConfigFlow as BaseConfigFlow, ConfigFlowResult
+from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
+
+from .const import DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+
+STEP_USER_DATA_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_HOST): str,
+        vol.Required(CONF_USERNAME): str,
+        vol.Required(CONF_PASSWORD): str,
+    }
+)
+
+
+@dataclass
+class DeviceInfo:
+    """Device information."""
+
+    title: str
+    device_id: str
+
+
+async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> DeviceInfo:
+    """Validate the user input allows us to connect.
+
+    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
+    """
+    session = async_get_clientsession(hass)
+
+    client = AirobotClient(
+        host=data[CONF_HOST],
+        username=data[CONF_USERNAME],
+        password=data[CONF_PASSWORD],
+        session=session,
+    )
+
+    try:
+        # Try to fetch data to validate connection and authentication
+        status = await client.get_statuses()
+        settings = await client.get_settings()
+    except AirobotAuthError as err:
+        raise InvalidAuth from err
+    except (AirobotConnectionError, AirobotTimeoutError, AirobotError) as err:
+        raise CannotConnect from err
+
+    # Use device name or device ID as title
+    title = settings.device_name or status.device_id
+
+    return DeviceInfo(title=title, device_id=status.device_id)
+
+
+class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
+    """Handle a config flow for Airobot."""
+
+    VERSION = 1
+    MINOR_VERSION = 1
+
+    def __init__(self) -> None:
+        """Initialize the config flow."""
+        self._discovered_host: str | None = None
+        self._discovered_mac: str | None = None
+        self._discovered_device_id: str | None = None
+
+    async def async_step_dhcp(
+        self, discovery_info: DhcpServiceInfo
+    ) -> ConfigFlowResult:
+        """Handle DHCP discovery."""
+        # Store the discovered IP address and MAC
+        self._discovered_host = discovery_info.ip
+        self._discovered_mac = discovery_info.macaddress
+
+        # Extract device_id from hostname (format: airobot-thermostat-t01xxxxxx)
+        hostname = discovery_info.hostname.lower()
+        device_id = hostname.replace("airobot-thermostat-", "").upper()
+        self._discovered_device_id = device_id
+        # Set unique_id to device_id for duplicate detection
+        await self.async_set_unique_id(device_id)
+        self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
+
+        # Show the confirmation form
+        return await self.async_step_dhcp_confirm()
+
+    async def async_step_dhcp_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle DHCP discovery confirmation - ask for credentials only."""
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            # Combine discovered host and device_id with user-provided password
+            data = {
+                CONF_HOST: self._discovered_host,
+                CONF_USERNAME: self._discovered_device_id,
+                CONF_PASSWORD: user_input[CONF_PASSWORD],
+            }
+
+            try:
+                info = await validate_input(self.hass, data)
+            except CannotConnect:
+                errors["base"] = "cannot_connect"
+            except InvalidAuth:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                # Store MAC address in config entry data
+                if self._discovered_mac:
+                    data[CONF_MAC] = self._discovered_mac
+
+                return self.async_create_entry(title=info.title, data=data)
+
+        # Only ask for password since we already have the device_id from discovery
+        return self.async_show_form(
+            step_id="dhcp_confirm",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_PASSWORD): str,
+                }
+            ),
+            description_placeholders={
+                "host": self._discovered_host or "",
+                "device_id": self._discovered_device_id or "",
+            },
+            errors=errors,
+        )
+
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the initial step."""
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            try:
+                info = await validate_input(self.hass, user_input)
+            except CannotConnect:
+                errors["base"] = "cannot_connect"
+            except InvalidAuth:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                # Use device ID as unique ID to prevent duplicates
+                await self.async_set_unique_id(info.device_id)
+                self._abort_if_unique_id_configured()
+                return self.async_create_entry(title=info.title, data=user_input)
+
+        return self.async_show_form(
+            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
+        )
+
+
+class CannotConnect(HomeAssistantError):
+    """Error to indicate we cannot connect."""
+
+
+class InvalidAuth(HomeAssistantError):
+    """Error to indicate there is invalid auth."""
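The DHCP step derives the device ID — and with it the username and the config-entry unique ID — purely from the announced hostname. A tiny sketch with an illustrative hostname (the format comes from the code comment above):

```python
hostname = "airobot-thermostat-t01abc123"  # illustrative value
device_id = hostname.replace("airobot-thermostat-", "").upper()
assert device_id == "T01ABC123"
```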
homeassistant/components/airobot/const.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+"""Constants for the Airobot integration."""
+
+from typing import Final
+
+DOMAIN: Final = "airobot"
homeassistant/components/airobot/coordinator.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+"""Coordinator for the Airobot integration."""
+
+from __future__ import annotations
+
+from datetime import timedelta
+import logging
+
+from pyairobotrest import AirobotClient
+from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+
+from .const import DOMAIN
+from .models import AirobotData
+
+_LOGGER = logging.getLogger(__name__)
+
+# Update interval - thermostat measures air every 30 seconds
+UPDATE_INTERVAL = timedelta(seconds=30)
+
+type AirobotConfigEntry = ConfigEntry[AirobotDataUpdateCoordinator]
+
+
+class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
+    """Class to manage fetching Airobot data."""
+
+    config_entry: AirobotConfigEntry
+
+    def __init__(self, hass: HomeAssistant, entry: AirobotConfigEntry) -> None:
+        """Initialize the coordinator."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            name=DOMAIN,
+            update_interval=UPDATE_INTERVAL,
+            config_entry=entry,
+        )
+        session = async_get_clientsession(hass)
+
+        self.client = AirobotClient(
+            host=entry.data[CONF_HOST],
+            username=entry.data[CONF_USERNAME],
+            password=entry.data[CONF_PASSWORD],
+            session=session,
+        )
+
+    async def _async_update_data(self) -> AirobotData:
+        """Fetch data from API endpoint."""
+        try:
+            status = await self.client.get_statuses()
+            settings = await self.client.get_settings()
+        except (AirobotAuthError, AirobotConnectionError) as err:
+            raise UpdateFailed(f"Failed to communicate with device: {err}") from err
+
+        return AirobotData(status=status, settings=settings)
homeassistant/components/airobot/entity.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+"""Base entity for Airobot integration."""
+
+from __future__ import annotations
+
+from homeassistant.const import CONF_MAC
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import DOMAIN
+from .coordinator import AirobotDataUpdateCoordinator
+
+
+class AirobotEntity(CoordinatorEntity[AirobotDataUpdateCoordinator]):
+    """Base class for Airobot entities."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: AirobotDataUpdateCoordinator,
+    ) -> None:
+        """Initialize the entity."""
+        super().__init__(coordinator)
+        status = coordinator.data.status
+        settings = coordinator.data.settings
+
+        self._attr_unique_id = status.device_id
+
+        connections = set()
+        if (mac := coordinator.config_entry.data.get(CONF_MAC)) is not None:
+            connections.add((CONNECTION_NETWORK_MAC, mac))
+
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, status.device_id)},
+            connections=connections,
+            name=settings.device_name or status.device_id,
+            manufacturer="Airobot",
+            model="Thermostat",
+            model_id="TE1",
+            sw_version=str(status.fw_version),
+            hw_version=str(status.hw_version),
+        )
homeassistant/components/airobot/manifest.json (new file, 17 lines)
@@ -0,0 +1,17 @@
+{
+  "domain": "airobot",
+  "name": "Airobot",
+  "codeowners": ["@mettolen"],
+  "config_flow": true,
+  "dhcp": [
+    {
+      "hostname": "airobot-thermostat-*"
+    }
+  ],
+  "documentation": "https://www.home-assistant.io/integrations/airobot",
+  "integration_type": "device",
+  "iot_class": "local_polling",
+  "loggers": ["pyairobotrest"],
+  "quality_scale": "bronze",
+  "requirements": ["pyairobotrest==0.1.0"]
+}
homeassistant/components/airobot/models.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+"""Models for the Airobot integration."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from pyairobotrest.models import ThermostatSettings, ThermostatStatus
+
+
+@dataclass
+class AirobotData:
+    """Data from the Airobot coordinator."""
+
+    status: ThermostatStatus
+    settings: ThermostatSettings
homeassistant/components/airobot/quality_scale.yaml (new file, 70 lines)
@@ -0,0 +1,70 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: Integration does not register custom actions.
+  appropriate-polling: done
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: Integration does not register custom actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup:
+    status: exempt
+    comment: Integration does not use event subscriptions.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions: done
+  config-entry-unloading: done
+  docs-configuration-parameters: done
+  docs-installation-parameters: done
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: done
+  reauthentication-flow: todo
+  test-coverage: done
+
+  # Gold
+  devices: done
+  diagnostics: todo
+  discovery-update-info: done
+  discovery: done
+  docs-data-update: done
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices: done
+  docs-supported-functions: done
+  docs-troubleshooting: done
+  docs-use-cases: todo
+  dynamic-devices:
+    status: exempt
+    comment: Single device integration, no dynamic device discovery needed.
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: todo
+  entity-translations: todo
+  exception-translations: done
+  icon-translations: todo
+  reconfiguration-flow: todo
+  repair-issues: todo
+  stale-devices:
+    status: exempt
+    comment: Single device integration, no stale device handling needed.
+
+  # Platinum
+  async-dependency: done
+  inject-websession: done
+  strict-typing: todo
homeassistant/components/airobot/strings.json (new file, 44 lines)
@@ -0,0 +1,44 @@
+{
+  "config": {
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+    },
+    "error": {
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "unknown": "[%key:common::config_flow::error::unknown%]"
+    },
+    "step": {
+      "dhcp_confirm": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "password": "The thermostat password."
+        },
+        "description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
+      },
+      "user": {
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]",
+          "password": "[%key:common::config_flow::data::password%]",
+          "username": "[%key:common::config_flow::data::username%]"
+        },
+        "data_description": {
+          "host": "The hostname or IP address of your Airobot thermostat.",
+          "password": "The thermostat password.",
+          "username": "The thermostat Device ID (e.g., T01XXXXXX)."
+        },
+        "description": "Enter your Airobot thermostat connection details. Find the Device ID and password in the thermostat settings menu under Connectivity → Mobile app."
+      }
+    }
+  },
+  "exceptions": {
+    "set_preset_mode_failed": {
+      "message": "Failed to set preset mode to {preset_mode}."
+    },
+    "set_temperature_failed": {
+      "message": "Failed to set temperature to {temperature}."
+    }
+  }
+}
@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_ADDRESS

-from .const import DOMAIN, MFCT_ID
+from .const import DEVICE_MODEL, DOMAIN, MFCT_ID

 _LOGGER = logging.getLogger(__name__)

@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Confirm discovery."""
+        assert self._discovered_device is not None
+
         if user_input is not None:
-            if (
-                self._discovered_device is not None
-                and self._discovered_device.device.firmware.need_firmware_upgrade
-            ):
+            if self._discovered_device.device.firmware.need_firmware_upgrade:
                 return self.async_abort(reason="firmware_upgrade_required")

             return self.async_create_entry(
-                title=self.context["title_placeholders"]["name"], data={}
+                title=self.context["title_placeholders"]["name"],
+                data={DEVICE_MODEL: self._discovered_device.device.model.value},
             )

         self._set_confirm_only()
@@ -164,7 +164,10 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):

         self._discovered_device = discovery

-        return self.async_create_entry(title=discovery.name, data={})
+        return self.async_create_entry(
+            title=discovery.name,
+            data={DEVICE_MODEL: discovery.device.model.value},
+        )

         current_addresses = self._async_current_ids(include_ignore=False)
         devices: list[BluetoothServiceInfoBleak] = []
@@ -1,11 +1,16 @@
 """Constants for Airthings BLE."""

+from airthings_ble import AirthingsDeviceType
+
 DOMAIN = "airthings_ble"
 MFCT_ID = 820

 VOLUME_BECQUEREL = "Bq/m³"
 VOLUME_PICOCURIE = "pCi/L"

+DEVICE_MODEL = "device_model"
+
 DEFAULT_SCAN_INTERVAL = 300
+DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}

 MAX_RETRIES_AFTER_STARTUP = 5
@@ -16,7 +16,12 @@ from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util.unit_system import METRIC_SYSTEM

-from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
+from .const import (
+    DEFAULT_SCAN_INTERVAL,
+    DEVICE_MODEL,
+    DEVICE_SPECIFIC_SCAN_INTERVAL,
+    DOMAIN,
+)

 _LOGGER = logging.getLogger(__name__)

@@ -34,12 +39,18 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
         self.airthings = AirthingsBluetoothDeviceData(
             _LOGGER, hass.config.units is METRIC_SYSTEM
         )

+        device_model = entry.data.get(DEVICE_MODEL)
+        interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
+            device_model, DEFAULT_SCAN_INTERVAL
+        )
+
         super().__init__(
             hass,
             _LOGGER,
             config_entry=entry,
             name=DOMAIN,
-            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
+            update_interval=timedelta(seconds=interval),
         )

     async def _async_setup(self) -> None:
@@ -58,11 +69,29 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
         )
         self.ble_device = ble_device

+        if DEVICE_MODEL not in self.config_entry.data:
+            _LOGGER.debug("Fetching device info for migration")
+            try:
+                data = await self.airthings.update_device(self.ble_device)
+            except Exception as err:
+                raise UpdateFailed(
+                    f"Unable to fetch data for migration: {err}"
+                ) from err
+
+            self.hass.config_entries.async_update_entry(
+                self.config_entry,
+                data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
+            )
+            self.update_interval = timedelta(
+                seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
+                    data.model.value, DEFAULT_SCAN_INTERVAL
+                )
+            )
+
     async def _async_update_data(self) -> AirthingsDevice:
         """Get data from Airthings BLE."""
         try:
             data = await self.airthings.update_device(self.ble_device)
         except Exception as err:
             raise UpdateFailed(f"Unable to fetch data: {err}") from err

         return data
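The net effect of the coordinator changes, as a standalone sketch: the model stored in the config entry selects the polling interval, so radon monitors such as the Corentium Home 2 are polled far less often. The dictionary key string below is assumed; the real code keys on `AirthingsDeviceType.CORENTIUM_HOME_2.value`.

```python
from datetime import timedelta

DEFAULT_SCAN_INTERVAL = 300
DEVICE_SPECIFIC_SCAN_INTERVAL = {"corentium_home_2": 1800}  # key string assumed


def scan_interval(device_model: str | None) -> timedelta:
    seconds = DEVICE_SPECIFIC_SCAN_INTERVAL.get(device_model, DEFAULT_SCAN_INTERVAL)
    return timedelta(seconds=seconds)


assert scan_interval("corentium_home_2") == timedelta(seconds=1800)
assert scan_interval(None) == timedelta(seconds=300)
```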
@@ -58,7 +58,10 @@ from homeassistant.const import (
 from homeassistant.helpers import network
 from homeassistant.util import color as color_util, dt as dt_util
 from homeassistant.util.decorator import Registry
-from homeassistant.util.unit_conversion import TemperatureConverter
+from homeassistant.util.unit_conversion import (
+    TemperatureConverter,
+    TemperatureDeltaConverter,
+)

 from .config import AbstractConfig
 from .const import (
@@ -844,7 +847,7 @@ def temperature_from_object(
         temp -= 273.15

     if interval:
-        return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
+        return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
     return TemperatureConverter.convert(temp, from_unit, to_unit)


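Why a dedicated delta converter matters here: converting a temperature *difference* must skip the offset that absolute conversions apply. A plain-Python illustration of the distinction:

```python
def c_to_f_absolute(temp_c: float) -> float:
    return temp_c * 9 / 5 + 32  # offset applies to absolute readings


def c_to_f_delta(delta_c: float) -> float:
    return delta_c * 9 / 5  # a difference only scales; no offset


assert c_to_f_absolute(10) == 50.0  # a 10 °C reading is 50 °F
assert c_to_f_delta(10) == 18.0  # a 10 °C change is an 18 °F change
```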
@@ -6,8 +6,8 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import Final

-from aioamazondevices.api import AmazonDevice
-from aioamazondevices.const import SENSOR_STATE_OFF
+from aioamazondevices.const.metadata import SENSOR_STATE_OFF
+from aioamazondevices.structures import AmazonDevice

 from homeassistant.components.binary_sensor import (
     DOMAIN as BINARY_SENSOR_DOMAIN,
@@ -45,7 +45,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
         data[CONF_PASSWORD],
     )

-    return await api.login_mode_interactive(data[CONF_CODE])
+    return await api.login.login_mode_interactive(data[CONF_CODE])


 class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -2,12 +2,13 @@

 from datetime import timedelta

-from aioamazondevices.api import AmazonDevice, AmazonEchoApi
+from aioamazondevices.api import AmazonEchoApi
 from aioamazondevices.exceptions import (
     CannotAuthenticate,
     CannotConnect,
     CannotRetrieveData,
 )
+from aioamazondevices.structures import AmazonDevice
 from aiohttp import ClientSession

 from homeassistant.config_entries import ConfigEntry
@@ -15,6 +16,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers import device_registry as dr
+from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
@@ -42,6 +44,9 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
             name=entry.title,
             config_entry=entry,
             update_interval=timedelta(seconds=SCAN_INTERVAL),
+            request_refresh_debouncer=Debouncer(
+                hass, _LOGGER, cooldown=30, immediate=False
+            ),
         )
         self.api = AmazonEchoApi(
             session,
@@ -54,7 +59,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
     async def _async_update_data(self) -> dict[str, AmazonDevice]:
         """Update device data."""
         try:
-            await self.api.login_mode_stored_data()
+            await self.api.login.login_mode_stored_data()
             data = await self.api.get_devices_data()
         except CannotConnect as err:
             raise UpdateFailed(
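The `request_refresh_debouncer` added above collapses bursts of `async_request_refresh()` calls into at most one update per `cooldown` window; with `immediate=False` the refresh runs at the end of the window rather than on the first call. A minimal sketch of the effect, assuming a coordinator instance named `coordinator` (hypothetical usage, not part of the diff):

    # Three rapid refresh requests ...
    for _ in range(3):
        await coordinator.async_request_refresh()
    # ... result in a single _async_update_data() call, roughly 30 s after
    # the first request, because of Debouncer(cooldown=30, immediate=False).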
@@ -2,9 +2,10 @@

 from __future__ import annotations

+from dataclasses import asdict
 from typing import Any

-from aioamazondevices.api import AmazonDevice
+from aioamazondevices.structures import AmazonDevice

 from homeassistant.components.diagnostics import async_redact_data
 from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
@@ -60,5 +61,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
         "online": device.online,
         "serial number": device.serial_number,
         "software version": device.software_version,
-        "sensors": device.sensors,
+        "sensors": {key: asdict(sensor) for key, sensor in device.sensors.items()},
     }
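Diagnostics payloads must be JSON-serializable, and `asdict()` recursively converts a dataclass instance into plain dicts. A minimal sketch with a hypothetical stand-in for the library's sensor structure (the real one lives in `aioamazondevices.structures`):

    from dataclasses import asdict, dataclass

    @dataclass
    class FakeSensor:  # stand-in for the library's sensor dataclass
        name: str
        value: str

    sensors = {"temperature": FakeSensor("temperature", "21.5")}
    print({key: asdict(sensor) for key, sensor in sensors.items()})
    # {'temperature': {'name': 'temperature', 'value': '21.5'}}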
@@ -1,7 +1,7 @@
 """Defines a base Alexa Devices entity."""

-from aioamazondevices.api import AmazonDevice
-from aioamazondevices.const import SPEAKER_GROUP_MODEL
+from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
+from aioamazondevices.structures import AmazonDevice

 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity import EntityDescription
@@ -8,5 +8,5 @@
     "iot_class": "cloud_polling",
     "loggers": ["aioamazondevices"],
     "quality_scale": "platinum",
-    "requirements": ["aioamazondevices==6.5.6"]
+    "requirements": ["aioamazondevices==9.0.2"]
 }
@@ -6,8 +6,9 @@ from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from typing import Any, Final

-from aioamazondevices.api import AmazonDevice, AmazonEchoApi
-from aioamazondevices.const import SPEAKER_GROUP_FAMILY
+from aioamazondevices.api import AmazonEchoApi
+from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
+from aioamazondevices.structures import AmazonDevice

 from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
 from homeassistant.core import HomeAssistant
@@ -7,12 +7,12 @@ from dataclasses import dataclass
 from datetime import datetime
 from typing import Final

-from aioamazondevices.api import AmazonDevice
-from aioamazondevices.const import (
+from aioamazondevices.const.schedules import (
     NOTIFICATION_ALARM,
     NOTIFICATION_REMINDER,
     NOTIFICATION_TIMER,
 )
+from aioamazondevices.structures import AmazonDevice

 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -1,6 +1,6 @@
 """Support for services."""

-from aioamazondevices.sounds import SOUNDS_LIST
+from aioamazondevices.const.sounds import SOUNDS_LIST
 import voluptuous as vol

 from homeassistant.config_entries import ConfigEntryState
@@ -6,7 +6,7 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, Final

-from aioamazondevices.api import AmazonDevice
+from aioamazondevices.structures import AmazonDevice

 from homeassistant.components.switch import (
     DOMAIN as SWITCH_DOMAIN,
@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate

-from aioamazondevices.const import SPEAKER_GROUP_FAMILY
+from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

 from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
@@ -9,14 +9,14 @@ from homeassistant.helpers import config_validation as cv

 from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
 from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
-from .services import setup_services
+from .services import async_setup_services

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Amber component."""
-    setup_services(hass)
+    async_setup_services(hass)
     return True

@@ -10,6 +10,7 @@ from homeassistant.core import (
     ServiceCall,
     ServiceResponse,
     SupportsResponse,
+    callback,
 )
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers.selector import ConfigEntrySelector
@@ -102,7 +103,8 @@ def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
     return results


-def setup_services(hass: HomeAssistant) -> None:
+@callback
+def async_setup_services(hass: HomeAssistant) -> None:
     """Set up the services for the Amber integration."""

     async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
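The rename to `async_setup_services` plus `@callback` follows the Home Assistant convention that a callback runs directly in the event loop and must not block or await. A minimal sketch of the registration pattern, assuming the service name implied by `handle_get_forecasts` in the surrounding hunk (the schema and exact service id are assumptions):

    @callback
    def async_setup_services(hass: HomeAssistant) -> None:
        """Register services; safe to call from the event loop, never blocks."""
        hass.services.async_register(
            DOMAIN,
            "get_forecasts",  # assumed service id, matching the handler name
            handle_get_forecasts,
            supports_response=SupportsResponse.ONLY,
        )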
@@ -6,9 +6,7 @@ import voluptuous as vol

 from homeassistant.components import websocket_api
 from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
-from homeassistant.core import Event, HassJob, HomeAssistant, callback
-from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.event import async_call_later, async_track_time_interval
+from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.hass_dict import HassKey

@@ -20,7 +18,7 @@ from .analytics import (
     EntityAnalyticsModifications,
     async_devices_payload,
 )
-from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
+from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
 from .http import AnalyticsDevicesView

 __all__ = [
@@ -31,40 +29,43 @@ __all__ = [
     "async_devices_payload",
 ]

-CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
+CONF_SNAPSHOTS_URL = "snapshots_url"
+
+CONFIG_SCHEMA = vol.Schema(
+    {
+        DOMAIN: vol.Schema(
+            {
+                vol.Optional(CONF_SNAPSHOTS_URL): vol.Any(str, None),
+            }
+        )
+    },
+    extra=vol.ALLOW_EXTRA,
+)

 DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)


-async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the analytics integration."""
-    analytics = Analytics(hass)
+    analytics_config = config.get(DOMAIN, {})
+
+    # For now we want to enable device analytics only if the url option
+    # is explicitly listed in YAML.
+    if CONF_SNAPSHOTS_URL in analytics_config:
+        disable_snapshots = False
+        snapshots_url = analytics_config[CONF_SNAPSHOTS_URL]
+    else:
+        disable_snapshots = True
+        snapshots_url = None
+
+    analytics = Analytics(hass, snapshots_url, disable_snapshots)

     # Load stored data
     await analytics.load()

-    @callback
-    def start_schedule(_event: Event) -> None:
+    async def start_schedule(_event: Event) -> None:
         """Start the send schedule after the started event."""
-        # Wait 15 min after started
-        async_call_later(
-            hass,
-            900,
-            HassJob(
-                analytics.send_analytics,
-                name="analytics schedule",
-                cancel_on_shutdown=True,
-            ),
-        )
-
-        # Send every day
-        async_track_time_interval(
-            hass,
-            analytics.send_analytics,
-            INTERVAL,
-            name="analytics daily",
-            cancel_on_shutdown=True,
-        )
+        await analytics.async_schedule()

     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)

@@ -111,7 +112,7 @@ async def websocket_analytics_preferences(
     analytics = hass.data[DATA_COMPONENT]

     await analytics.save_preferences(preferences)
-    await analytics.send_analytics()
+    await analytics.async_schedule()

     connection.send_result(
         msg["id"],
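One subtlety in the YAML gate above: `vol.Any(str, None)` means the option may be present with a null value, and presence alone (the `in` check) is what enables snapshots; a null URL then falls through to the default endpoint downstream. A minimal sketch of the three cases, mirroring the async_setup logic:

    def gate(analytics_config: dict) -> tuple[bool, str | None]:
        # Returns (disable_snapshots, snapshots_url), as in async_setup above.
        if "snapshots_url" in analytics_config:
            return False, analytics_config["snapshots_url"]
        return True, None

    print(gate({}))                                  # (True, None)  - snapshots disabled
    print(gate({"snapshots_url": None}))             # (False, None) - enabled, default URL
    print(gate({"snapshots_url": "https://x.test"})) # (False, 'https://x.test')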
@@ -7,6 +7,8 @@ from asyncio import timeout
 from collections.abc import Awaitable, Callable, Iterable, Mapping
 from dataclasses import asdict as dataclass_asdict, dataclass, field
 from datetime import datetime
+import random
+import time
 from typing import Any, Protocol
 import uuid

@@ -31,10 +33,18 @@ from homeassistant.const import (
     BASE_PLATFORMS,
     __version__ as HA_VERSION,
 )
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import (
+    CALLBACK_TYPE,
+    HassJob,
+    HomeAssistant,
+    ReleaseChannel,
+    callback,
+    get_release_channel,
+)
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.event import async_call_later, async_track_time_interval
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.singleton import singleton
 from homeassistant.helpers.storage import Store
@@ -49,8 +59,6 @@ from homeassistant.loader import (
 from homeassistant.setup import async_get_loaded_integrations

 from .const import (
-    ANALYTICS_ENDPOINT_URL,
-    ANALYTICS_ENDPOINT_URL_DEV,
     ATTR_ADDON_COUNT,
     ATTR_ADDONS,
     ATTR_ARCH,
@@ -71,6 +79,7 @@ from .const import (
     ATTR_PROTECTED,
     ATTR_RECORDER,
     ATTR_SLUG,
+    ATTR_SNAPSHOTS,
     ATTR_STATE_COUNT,
     ATTR_STATISTICS,
     ATTR_SUPERVISOR,
@@ -79,9 +88,15 @@ from .const import (
     ATTR_USER_COUNT,
     ATTR_UUID,
     ATTR_VERSION,
+    BASIC_ENDPOINT_URL,
+    BASIC_ENDPOINT_URL_DEV,
     DOMAIN,
+    INTERVAL,
     LOGGER,
     PREFERENCE_SCHEMA,
+    SNAPSHOT_DEFAULT_URL,
+    SNAPSHOT_URL_PATH,
+    SNAPSHOT_VERSION,
     STORAGE_KEY,
     STORAGE_VERSION,
 )
@@ -194,13 +209,18 @@ def gen_uuid() -> str:
     return uuid.uuid4().hex


+RELEASE_CHANNEL = get_release_channel()
+
+
 @dataclass
 class AnalyticsData:
     """Analytics data."""

     onboarded: bool
     preferences: dict[str, bool]
-    uuid: str | None
+    uuid: str | None = None
+    submission_identifier: str | None = None
+    snapshot_submission_time: float | None = None

     @classmethod
     def from_dict(cls, data: dict[str, Any]) -> AnalyticsData:
@@ -209,29 +229,44 @@ class AnalyticsData:
             data["onboarded"],
             data["preferences"],
             data["uuid"],
+            data.get("submission_identifier"),
+            data.get("snapshot_submission_time"),
         )


 class Analytics:
     """Analytics helper class for the analytics integration."""

-    def __init__(self, hass: HomeAssistant) -> None:
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        snapshots_url: str | None = None,
+        disable_snapshots: bool = False,
+    ) -> None:
         """Initialize the Analytics class."""
-        self.hass: HomeAssistant = hass
-        self.session = async_get_clientsession(hass)
-        self._data = AnalyticsData(False, {}, None)
+        self._hass: HomeAssistant = hass
+        self._snapshots_url = snapshots_url
+        self._disable_snapshots = disable_snapshots
+
+        self._session = async_get_clientsession(hass)
+        self._data = AnalyticsData(False, {})
         self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
+        self._basic_scheduled: CALLBACK_TYPE | None = None
+        self._snapshot_scheduled: CALLBACK_TYPE | None = None

     @property
     def preferences(self) -> dict:
         """Return the current active preferences."""
         preferences = self._data.preferences
-        return {
+        result = {
             ATTR_BASE: preferences.get(ATTR_BASE, False),
             ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
             ATTR_USAGE: preferences.get(ATTR_USAGE, False),
             ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
         }
+        if not self._disable_snapshots:
+            result[ATTR_SNAPSHOTS] = preferences.get(ATTR_SNAPSHOTS, False)
+        return result

     @property
     def onboarded(self) -> bool:
@@ -244,17 +279,17 @@ class Analytics:
         return self._data.uuid

     @property
-    def endpoint(self) -> str:
+    def endpoint_basic(self) -> str:
         """Return the endpoint that will receive the payload."""
-        if HA_VERSION.endswith("0.dev0"):
+        if RELEASE_CHANNEL is ReleaseChannel.DEV:
             # dev installations will contact the dev analytics environment
-            return ANALYTICS_ENDPOINT_URL_DEV
-        return ANALYTICS_ENDPOINT_URL
+            return BASIC_ENDPOINT_URL_DEV
+        return BASIC_ENDPOINT_URL

     @property
     def supervisor(self) -> bool:
         """Return bool if a supervisor is present."""
-        return is_hassio(self.hass)
+        return is_hassio(self._hass)

     async def load(self) -> None:
         """Load preferences."""
@@ -264,7 +299,7 @@ class Analytics:

         if (
             self.supervisor
-            and (supervisor_info := hassio.get_supervisor_info(self.hass)) is not None
+            and (supervisor_info := hassio.get_supervisor_info(self._hass)) is not None
         ):
             if not self.onboarded:
                 # User have not configured analytics, get this setting from the supervisor
@@ -277,32 +312,35 @@ class Analytics:
         ):
             self._data.preferences[ATTR_DIAGNOSTICS] = False

+    async def _save(self) -> None:
+        """Save data."""
+        await self._store.async_save(dataclass_asdict(self._data))
+
     async def save_preferences(self, preferences: dict) -> None:
         """Save preferences."""
         preferences = PREFERENCE_SCHEMA(preferences)
         self._data.preferences.update(preferences)
         self._data.onboarded = True

-        await self._store.async_save(dataclass_asdict(self._data))
+        await self._save()

         if self.supervisor:
             await hassio.async_update_diagnostics(
-                self.hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
+                self._hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
             )

     async def send_analytics(self, _: datetime | None = None) -> None:
         """Send analytics."""
-        hass = self.hass
+        if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
+            return
+
+        hass = self._hass
         supervisor_info = None
         operating_system_info: dict[str, Any] = {}

-        if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
-            LOGGER.debug("Nothing to submit")
-            return
-
         if self._data.uuid is None:
             self._data.uuid = gen_uuid()
-            await self._store.async_save(dataclass_asdict(self._data))
+            await self._save()

         if self.supervisor:
             supervisor_info = hassio.get_supervisor_info(hass)
@@ -436,7 +474,7 @@ class Analytics:

         try:
             async with timeout(30):
-                response = await self.session.post(self.endpoint, json=payload)
+                response = await self._session.post(self.endpoint_basic, json=payload)
             if response.status == 200:
                 LOGGER.info(
                     (
@@ -449,14 +487,12 @@ class Analytics:
                 LOGGER.warning(
                     "Sending analytics failed with statuscode %s from %s",
                     response.status,
-                    self.endpoint,
+                    self.endpoint_basic,
                 )
         except TimeoutError:
-            LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
+            LOGGER.error("Timeout sending analytics to %s", BASIC_ENDPOINT_URL)
         except aiohttp.ClientError as err:
-            LOGGER.error(
-                "Error sending analytics to %s: %r", ANALYTICS_ENDPOINT_URL, err
-            )
+            LOGGER.error("Error sending analytics to %s: %r", BASIC_ENDPOINT_URL, err)

     @callback
     def _async_should_report_integration(
@@ -480,7 +516,7 @@ class Analytics:
         if not integration.config_flow:
             return False

-        entries = self.hass.config_entries.async_entries(integration.domain)
+        entries = self._hass.config_entries.async_entries(integration.domain)

         # Filter out ignored and disabled entries
         return any(
@@ -489,6 +525,186 @@ class Analytics:
             if entry.source != SOURCE_IGNORE and entry.disabled_by is None
         )

+    async def send_snapshot(self, _: datetime | None = None) -> None:
+        """Send a snapshot."""
+        if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
+            return
+
+        payload = await _async_snapshot_payload(self._hass)
+
+        headers = {
+            "Content-Type": "application/json",
+            "User-Agent": f"home-assistant/{HA_VERSION}",
+        }
+        if self._data.submission_identifier is not None:
+            headers["X-Device-Database-Submission-Identifier"] = (
+                self._data.submission_identifier
+            )
+
+        url = (
+            self._snapshots_url
+            if self._snapshots_url is not None
+            else SNAPSHOT_DEFAULT_URL
+        )
+        url += SNAPSHOT_URL_PATH
+
+        try:
+            async with timeout(30):
+                response = await self._session.post(url, json=payload, headers=headers)
+
+                if response.status == 200:  # OK
+                    response_data = await response.json()
+                    new_identifier = response_data.get("submission_identifier")
+
+                    if (
+                        new_identifier is not None
+                        and new_identifier != self._data.submission_identifier
+                    ):
+                        self._data.submission_identifier = new_identifier
+                        await self._save()
+
+                    LOGGER.info(
+                        "Submitted snapshot analytics to Home Assistant servers"
+                    )
+
+                elif response.status == 400:  # Bad Request
+                    response_data = await response.json()
+                    error_kind = response_data.get("kind", "unknown")
+                    error_message = response_data.get("message", "Unknown error")
+
+                    if error_kind == "invalid-submission-identifier":
+                        # Clear the invalid identifier and retry on next cycle
+                        LOGGER.warning(
+                            "Invalid submission identifier to %s, clearing: %s",
+                            url,
+                            error_message,
+                        )
+                        self._data.submission_identifier = None
+                        await self._save()
+                    else:
+                        LOGGER.warning(
+                            "Malformed snapshot analytics submission (%s) to %s: %s",
+                            error_kind,
+                            url,
+                            error_message,
+                        )
+
+                elif response.status == 503:  # Service Unavailable
+                    response_text = await response.text()
+                    LOGGER.warning(
+                        "Snapshot analytics service %s unavailable: %s",
+                        url,
+                        response_text,
+                    )
+
+                else:
+                    LOGGER.warning(
+                        "Unexpected status code %s when submitting snapshot analytics to %s",
+                        response.status,
+                        url,
+                    )
+
+        except TimeoutError:
+            LOGGER.error(
+                "Timeout sending snapshot analytics to %s",
+                url,
+            )
+        except aiohttp.ClientError as err:
+            LOGGER.error(
+                "Error sending snapshot analytics to %s: %r",
+                url,
+                err,
+            )
+
+    async def async_schedule(self) -> None:
+        """Schedule analytics."""
+        if not self.onboarded:
+            LOGGER.debug("Analytics not scheduled")
+            if self._basic_scheduled is not None:
+                self._basic_scheduled()
+                self._basic_scheduled = None
+            if self._snapshot_scheduled:
+                self._snapshot_scheduled()
+                self._snapshot_scheduled = None
+            return
+
+        if not self.preferences.get(ATTR_BASE, False):
+            LOGGER.debug("Basic analytics not scheduled")
+            if self._basic_scheduled is not None:
+                self._basic_scheduled()
+                self._basic_scheduled = None
+        elif self._basic_scheduled is None:
+            # Wait 15 min after started for basic analytics
+            self._basic_scheduled = async_call_later(
+                self._hass,
+                900,
+                HassJob(
+                    self._async_schedule_basic,
+                    name="basic analytics schedule",
+                    cancel_on_shutdown=True,
+                ),
+            )
+
+        if not self.preferences.get(ATTR_SNAPSHOTS, False) or self._disable_snapshots:
+            LOGGER.debug("Snapshot analytics not scheduled")
+            if self._snapshot_scheduled:
+                self._snapshot_scheduled()
+                self._snapshot_scheduled = None
+        elif self._snapshot_scheduled is None:
+            snapshot_submission_time = self._data.snapshot_submission_time
+
+            interval_seconds = INTERVAL.total_seconds()
+
+            if snapshot_submission_time is None:
+                # Randomize the submission time within the 24 hours
+                snapshot_submission_time = random.uniform(0, interval_seconds)
+                self._data.snapshot_submission_time = snapshot_submission_time
+                await self._save()
+                LOGGER.debug(
+                    "Initialized snapshot submission time to %s",
+                    snapshot_submission_time,
+                )
+
+            # Calculate delay until next submission
+            current_time = time.time()
+            delay = (snapshot_submission_time - current_time) % interval_seconds
+
+            self._snapshot_scheduled = async_call_later(
+                self._hass,
+                delay,
+                HassJob(
+                    self._async_schedule_snapshots,
+                    name="snapshot analytics schedule",
+                    cancel_on_shutdown=True,
+                ),
+            )
+
+    async def _async_schedule_basic(self, _: datetime | None = None) -> None:
+        """Schedule basic analytics."""
+        await self.send_analytics()
+
+        # Send basic analytics every day
+        self._basic_scheduled = async_track_time_interval(
+            self._hass,
+            self.send_analytics,
+            INTERVAL,
+            name="basic analytics daily",
+            cancel_on_shutdown=True,
+        )
+
+    async def _async_schedule_snapshots(self, _: datetime | None = None) -> None:
+        """Schedule snapshot analytics."""
+        await self.send_snapshot()
+
+        # Send snapshot analytics every day
+        self._snapshot_scheduled = async_track_time_interval(
+            self._hass,
+            self.send_snapshot,
+            INTERVAL,
+            name="snapshot analytics daily",
+            cancel_on_shutdown=True,
+        )
+
+
 def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
     """Extract domains from the YAML configuration."""
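The delay computation in `async_schedule` uses epoch arithmetic modulo the 24-hour interval, so the randomized submission time behaves like a fixed time-of-day slot that survives restarts. A worked example with illustrative numbers:

    import time

    interval_seconds = 24 * 60 * 60          # INTERVAL.total_seconds()
    snapshot_submission_time = 30_000.0      # stored random offset, in seconds
    current_time = time.time()

    # Python's % always returns a value in [0, interval_seconds), even though
    # snapshot_submission_time - current_time is hugely negative, so the next
    # send always lands at the same offset within the current 24 h window.
    delay = (snapshot_submission_time - current_time) % interval_seconds
    assert 0 <= delay < interval_seconds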
@@ -505,8 +721,8 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
 DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()


-async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
-    """Return detailed information about entities and devices."""
+async def _async_snapshot_payload(hass: HomeAssistant) -> dict:  # noqa: C901
+    """Return detailed information about entities and devices for a snapshot."""
     dev_reg = dr.async_get(hass)
     ent_reg = er.async_get(hass)

@@ -711,8 +927,13 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901

             entities_info.append(entity_info)

+    return integrations_info
+
+
+async def async_devices_payload(hass: HomeAssistant) -> dict:
+    """Return detailed information about entities and devices for a direct download."""
     return {
-        "version": "home-assistant:1",
+        "version": f"home-assistant:{SNAPSHOT_VERSION}",
         "home_assistant": HA_VERSION,
-        "integrations": integrations_info,
+        "integrations": await _async_snapshot_payload(hass),
     }
@@ -5,13 +5,17 @@ import logging

 import voluptuous as vol

-ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
-ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
 DOMAIN = "analytics"
 INTERVAL = timedelta(days=1)
 STORAGE_KEY = "core.analytics"
 STORAGE_VERSION = 1

+BASIC_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
+BASIC_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
+
+SNAPSHOT_VERSION = 1
+SNAPSHOT_DEFAULT_URL = "https://device-database.eco-dev-aws.openhomefoundation.com"
+SNAPSHOT_URL_PATH = f"/api/v1/snapshot/{SNAPSHOT_VERSION}"
+
 LOGGER: logging.Logger = logging.getLogger(__package__)

@@ -38,6 +42,7 @@ ATTR_PREFERENCES = "preferences"
 ATTR_PROTECTED = "protected"
 ATTR_RECORDER = "recorder"
 ATTR_SLUG = "slug"
+ATTR_SNAPSHOTS = "snapshots"
 ATTR_STATE_COUNT = "state_count"
 ATTR_STATISTICS = "statistics"
 ATTR_SUPERVISOR = "supervisor"
@@ -51,6 +56,7 @@ ATTR_VERSION = "version"
 PREFERENCE_SCHEMA = vol.Schema(
     {
         vol.Optional(ATTR_BASE): bool,
+        vol.Optional(ATTR_SNAPSHOTS): bool,
         vol.Optional(ATTR_DIAGNOSTICS): bool,
         vol.Optional(ATTR_STATISTICS): bool,
         vol.Optional(ATTR_USAGE): bool,
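Putting the new constants together, the default snapshot submission endpoint resolves as the base URL plus a versioned path. A quick check of the composition (values copied from the hunk above):

    SNAPSHOT_VERSION = 1
    SNAPSHOT_DEFAULT_URL = "https://device-database.eco-dev-aws.openhomefoundation.com"
    SNAPSHOT_URL_PATH = f"/api/v1/snapshot/{SNAPSHOT_VERSION}"

    print(SNAPSHOT_DEFAULT_URL + SNAPSHOT_URL_PATH)
    # https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/1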
@@ -39,11 +39,11 @@ from .const import (
     CONF_TURN_OFF_COMMAND,
     CONF_TURN_ON_COMMAND,
     DEFAULT_ADB_SERVER_PORT,
-    DEFAULT_DEVICE_CLASS,
     DEFAULT_EXCLUDE_UNNAMED_APPS,
     DEFAULT_GET_SOURCES,
     DEFAULT_PORT,
     DEFAULT_SCREENCAP_INTERVAL,
+    DEVICE_AUTO,
     DEVICE_CLASSES,
     DOMAIN,
     PROP_ETHMAC,
@@ -89,8 +89,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
         data_schema = vol.Schema(
             {
                 vol.Required(CONF_HOST, default=host): str,
-                vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
-                    DEVICE_CLASSES
+                vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
+                    SelectSelectorConfig(
+                        options=[
+                            SelectOptionDict(value=k, label=v)
+                            for k, v in DEVICE_CLASSES.items()
+                        ],
+                        translation_key="device_class",
+                    )
                 ),
                 vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
             },
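Switching from `vol.In` over a list to a `SelectSelector` over a dict gives each option a stable value plus a human-readable label, and `translation_key` lets the frontend localize those labels. A minimal sketch of what the comprehension produces, using the dict from the const.py hunk below (SelectOptionDict is represented here as a plain dict):

    DEVICE_CLASSES = {
        "auto": "auto",
        "androidtv": "Android TV",
        "firetv": "Fire TV",
    }
    options = [{"value": k, "label": v} for k, v in DEVICE_CLASSES.items()]
    # [{'value': 'auto', 'label': 'auto'},
    #  {'value': 'androidtv', 'label': 'Android TV'},
    #  {'value': 'firetv', 'label': 'Fire TV'}]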
@@ -15,15 +15,19 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
 CONF_TURN_ON_COMMAND = "turn_on_command"

 DEFAULT_ADB_SERVER_PORT = 5037
-DEFAULT_DEVICE_CLASS = "auto"
 DEFAULT_EXCLUDE_UNNAMED_APPS = False
 DEFAULT_GET_SOURCES = True
 DEFAULT_PORT = 5555
 DEFAULT_SCREENCAP_INTERVAL = 5

+DEVICE_AUTO = "auto"
 DEVICE_ANDROIDTV = "androidtv"
 DEVICE_FIRETV = "firetv"
-DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
+DEVICE_CLASSES = {
+    DEVICE_AUTO: "auto",
+    DEVICE_ANDROIDTV: "Android TV",
+    DEVICE_FIRETV: "Fire TV",
+}

 PROP_ETHMAC = "ethmac"
 PROP_SERIALNO = "serialno"
@@ -65,6 +65,13 @@
             }
         }
     },
+    "selector": {
+        "device_class": {
+            "options": {
+                "auto": "Auto-detect device type"
+            }
+        }
+    },
     "services": {
         "adb_command": {
             "description": "Sends an ADB command to an Android / Fire TV device.",
@@ -25,7 +25,7 @@ from .const import (
     RECOMMENDED_CHAT_MODEL,
 )

-PLATFORMS = (Platform.CONVERSATION,)
+PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

 type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
homeassistant/components/anthropic/ai_task.py (new file, 80 lines)
@@ -0,0 +1,80 @@
+"""AI Task integration for Anthropic."""
+
+from __future__ import annotations
+
+from json import JSONDecodeError
+import logging
+
+from homeassistant.components import ai_task, conversation
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.util.json import json_loads
+
+from .entity import AnthropicBaseLLMEntity
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up AI Task entities."""
+    for subentry in config_entry.subentries.values():
+        if subentry.subentry_type != "ai_task_data":
+            continue
+
+        async_add_entities(
+            [AnthropicTaskEntity(config_entry, subentry)],
+            config_subentry_id=subentry.subentry_id,
+        )
+
+
+class AnthropicTaskEntity(
+    ai_task.AITaskEntity,
+    AnthropicBaseLLMEntity,
+):
+    """Anthropic AI Task entity."""
+
+    _attr_supported_features = (
+        ai_task.AITaskEntityFeature.GENERATE_DATA
+        | ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
+    )
+
+    async def _async_generate_data(
+        self,
+        task: ai_task.GenDataTask,
+        chat_log: conversation.ChatLog,
+    ) -> ai_task.GenDataTaskResult:
+        """Handle a generate data task."""
+        await self._async_handle_chat_log(chat_log, task.name, task.structure)
+
+        if not isinstance(chat_log.content[-1], conversation.AssistantContent):
+            raise HomeAssistantError(
+                "Last content in chat log is not an AssistantContent"
+            )
+
+        text = chat_log.content[-1].content or ""
+
+        if not task.structure:
+            return ai_task.GenDataTaskResult(
+                conversation_id=chat_log.conversation_id,
+                data=text,
+            )
+        try:
+            data = json_loads(text)
+        except JSONDecodeError as err:
+            _LOGGER.error(
+                "Failed to parse JSON response: %s. Response: %s",
+                err,
+                text,
+            )
+            raise HomeAssistantError("Error with Claude structured response") from err
+
+        return ai_task.GenDataTaskResult(
+            conversation_id=chat_log.conversation_id,
+            data=data,
+        )
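The structured path above returns whatever JSON the model produced when `task.structure` is set, and raw text otherwise. A minimal sketch of that branch in isolation (plain `json` stands in for Home Assistant's `json_loads` helper):

    import json

    def parse_result(text: str, structured: bool):
        # Mirrors the tail of _async_generate_data: passthrough vs JSON decode.
        if not structured:
            return text
        return json.loads(text)  # raises on malformed model output

    print(parse_result('{"temperature": 21}', structured=True))   # {'temperature': 21}
    print(parse_result("just prose", structured=False))           # just prose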
@@ -2,11 +2,11 @@

 from __future__ import annotations

-from collections.abc import Mapping
 from functools import partial
 import json
 import logging
-from typing import Any, cast
+import re
+from typing import Any

 import anthropic
 import voluptuous as vol
@@ -38,6 +38,7 @@ from homeassistant.helpers.selector import (
     SelectSelectorConfig,
     TemplateSelector,
 )
+from homeassistant.helpers.typing import VolDictType

 from .const import (
     CONF_CHAT_MODEL,
@@ -53,8 +54,10 @@ from .const import (
     CONF_WEB_SEARCH_REGION,
     CONF_WEB_SEARCH_TIMEZONE,
     CONF_WEB_SEARCH_USER_LOCATION,
+    DEFAULT_AI_TASK_NAME,
     DEFAULT_CONVERSATION_NAME,
     DOMAIN,
+    NON_THINKING_MODELS,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_MAX_TOKENS,
     RECOMMENDED_TEMPERATURE,
@@ -73,12 +76,16 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
     }
 )

-RECOMMENDED_OPTIONS = {
+RECOMMENDED_CONVERSATION_OPTIONS = {
     CONF_RECOMMENDED: True,
     CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
     CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }

+RECOMMENDED_AI_TASK_OPTIONS = {
+    CONF_RECOMMENDED: True,
+}
+

 async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
     """Validate the user input allows us to connect.
@@ -101,7 +108,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle the initial step."""
-        errors = {}
+        errors: dict[str, str] = {}

         if user_input is not None:
             self._async_abort_entries_match(user_input)
@@ -129,10 +136,16 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
                 subentries=[
                     {
                         "subentry_type": "conversation",
-                        "data": RECOMMENDED_OPTIONS,
+                        "data": RECOMMENDED_CONVERSATION_OPTIONS,
                         "title": DEFAULT_CONVERSATION_NAME,
                         "unique_id": None,
-                    }
+                    },
+                    {
+                        "subentry_type": "ai_task_data",
+                        "data": RECOMMENDED_AI_TASK_OPTIONS,
+                        "title": DEFAULT_AI_TASK_NAME,
+                        "unique_id": None,
+                    },
                 ],
             )

@@ -146,103 +159,279 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
         cls, config_entry: ConfigEntry
     ) -> dict[str, type[ConfigSubentryFlow]]:
         """Return subentries supported by this integration."""
-        return {"conversation": ConversationSubentryFlowHandler}
+        return {
+            "conversation": ConversationSubentryFlowHandler,
+            "ai_task_data": ConversationSubentryFlowHandler,
+        }


 class ConversationSubentryFlowHandler(ConfigSubentryFlow):
     """Flow for managing conversation subentries."""

-    last_rendered_recommended = False
+    options: dict[str, Any]

     @property
     def _is_new(self) -> bool:
         """Return if this is a new subentry."""
         return self.source == "user"

-    async def async_step_set_options(
+    async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> SubentryFlowResult:
-        """Set conversation options."""
+        """Add a subentry."""
+        if self._subentry_type == "ai_task_data":
+            self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
+        else:
+            self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
+        return await self.async_step_init()
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> SubentryFlowResult:
+        """Handle reconfiguration of a subentry."""
+        self.options = self._get_reconfigure_subentry().data.copy()
+        return await self.async_step_init()
+
+    async def async_step_init(
+        self, user_input: dict[str, Any] | None = None
+    ) -> SubentryFlowResult:
+        """Set initial options."""
         # abort if entry is not loaded
         if self._get_entry().state != ConfigEntryState.LOADED:
             return self.async_abort(reason="entry_not_loaded")

+        hass_apis: list[SelectOptionDict] = [
+            SelectOptionDict(
+                label=api.name,
+                value=api.id,
+            )
+            for api in llm.async_get_apis(self.hass)
+        ]
+        if (suggested_llm_apis := self.options.get(CONF_LLM_HASS_API)) and isinstance(
+            suggested_llm_apis, str
+        ):
+            self.options[CONF_LLM_HASS_API] = [suggested_llm_apis]
+
+        step_schema: VolDictType = {}
         errors: dict[str, str] = {}

-        if user_input is None:
-            if self._is_new:
-                options = RECOMMENDED_OPTIONS.copy()
-            else:
-                # If this is a reconfiguration, we need to copy the existing options
-                # so that we can show the current values in the form.
-                options = self._get_reconfigure_subentry().data.copy()
-
-            self.last_rendered_recommended = cast(
-                bool, options.get(CONF_RECOMMENDED, False)
-            )
-
-        elif user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
+        if self._is_new:
+            if self._subentry_type == "ai_task_data":
+                default_name = DEFAULT_AI_TASK_NAME
+            else:
+                default_name = DEFAULT_CONVERSATION_NAME
+            step_schema[vol.Required(CONF_NAME, default=default_name)] = str
+
+        if self._subentry_type == "conversation":
+            step_schema.update(
+                {
+                    vol.Optional(CONF_PROMPT): TemplateSelector(),
+                    vol.Optional(
+                        CONF_LLM_HASS_API,
+                    ): SelectSelector(
+                        SelectSelectorConfig(options=hass_apis, multiple=True)
+                    ),
+                }
+            )
+
+        step_schema[
+            vol.Required(
+                CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
+            )
+        ] = bool
+
+        if user_input is not None:
             if not user_input.get(CONF_LLM_HASS_API):
                 user_input.pop(CONF_LLM_HASS_API, None)
-            if user_input.get(
-                CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
-            ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
-                errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
-            if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
-                model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
-                if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
-                    errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
+
+            if user_input[CONF_RECOMMENDED]:
+                if not errors:
+                    if self._is_new:
+                        return self.async_create_entry(
+                            title=user_input.pop(CONF_NAME),
+                            data=user_input,
+                        )
+                    return self.async_update_and_abort(
+                        self._get_entry(),
+                        self._get_reconfigure_subentry(),
+                        data=user_input,
+                    )
+            else:
+                self.options.update(user_input)
+                if (
+                    CONF_LLM_HASS_API in self.options
+                    and CONF_LLM_HASS_API not in user_input
+                ):
+                    self.options.pop(CONF_LLM_HASS_API)
+                if not errors:
+                    return await self.async_step_advanced()
+
+        return self.async_show_form(
+            step_id="init",
+            data_schema=self.add_suggested_values_to_schema(
+                vol.Schema(step_schema), self.options
+            ),
+            errors=errors or None,
+        )
+
+    async def async_step_advanced(
+        self, user_input: dict[str, Any] | None = None
+    ) -> SubentryFlowResult:
+        """Manage advanced options."""
+        errors: dict[str, str] = {}
+
+        step_schema: VolDictType = {
+            vol.Optional(
+                CONF_CHAT_MODEL,
+                default=RECOMMENDED_CHAT_MODEL,
+            ): SelectSelector(
+                SelectSelectorConfig(
+                    options=await self._get_model_list(), custom_value=True
+                )
+            ),
+            vol.Optional(
+                CONF_MAX_TOKENS,
+                default=RECOMMENDED_MAX_TOKENS,
+            ): int,
+            vol.Optional(
+                CONF_TEMPERATURE,
+                default=RECOMMENDED_TEMPERATURE,
+            ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
+        }
+
+        if user_input is not None:
+            self.options.update(user_input)
+
+            if not errors:
+                return await self.async_step_model()
+
+        return self.async_show_form(
+            step_id="advanced",
+            data_schema=self.add_suggested_values_to_schema(
+                vol.Schema(step_schema), self.options
+            ),
+            errors=errors,
+        )
+
+    async def async_step_model(
+        self, user_input: dict[str, Any] | None = None
+    ) -> SubentryFlowResult:
+        """Manage model-specific options."""
+        errors: dict[str, str] = {}
+
+        step_schema: VolDictType = {}
+
+        model = self.options[CONF_CHAT_MODEL]
+
+        if not model.startswith(tuple(NON_THINKING_MODELS)):
+            step_schema[
+                vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
+            ] = vol.All(
+                NumberSelector(
+                    NumberSelectorConfig(
+                        min=0,
+                        max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
+                    )
+                ),
+                vol.Coerce(int),
+            )
+        else:
+            self.options.pop(CONF_THINKING_BUDGET, None)
+
+        if not model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
+            step_schema.update(
+                {
+                    vol.Optional(
+                        CONF_WEB_SEARCH,
+                        default=RECOMMENDED_WEB_SEARCH,
+                    ): bool,
+                    vol.Optional(
+                        CONF_WEB_SEARCH_MAX_USES,
+                        default=RECOMMENDED_WEB_SEARCH_MAX_USES,
+                    ): int,
+                    vol.Optional(
+                        CONF_WEB_SEARCH_USER_LOCATION,
+                        default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
+                    ): bool,
+                }
+            )
+        else:
+            self.options.pop(CONF_WEB_SEARCH, None)
+            self.options.pop(CONF_WEB_SEARCH_MAX_USES, None)
+            self.options.pop(CONF_WEB_SEARCH_USER_LOCATION, None)
+
+        self.options.pop(CONF_WEB_SEARCH_CITY, None)
+        self.options.pop(CONF_WEB_SEARCH_REGION, None)
+        self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
+        self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)
+
+        if not step_schema:
+            user_input = {}
+
+        if user_input is not None:
+            if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
-                elif user_input.get(
+                if user_input.get(
                     CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
                 ):
                     user_input.update(await self._get_location_data())

+            self.options.update(user_input)
+
             if not errors:
                 if self._is_new:
                     return self.async_create_entry(
-                        title=user_input.pop(CONF_NAME),
-                        data=user_input,
+                        title=self.options.pop(CONF_NAME),
+                        data=self.options,
                     )

                 return self.async_update_and_abort(
                     self._get_entry(),
                     self._get_reconfigure_subentry(),
-                    data=user_input,
+                    data=self.options,
                 )

-            options = user_input
-            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
-        else:
-            # Re-render the options again, now with the recommended options shown/hidden
-            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
-
-            options = {
-                CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
-                CONF_PROMPT: user_input[CONF_PROMPT],
-                CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
-            }
-
-        suggested_values = options.copy()
-        if not suggested_values.get(CONF_PROMPT):
-            suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
-        if (
-            suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
-        ) and isinstance(suggested_llm_apis, str):
-            suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
-
-        schema = self.add_suggested_values_to_schema(
-            vol.Schema(
-                anthropic_config_option_schema(self.hass, self._is_new, options)
-            ),
-            suggested_values,
-        )
-
         return self.async_show_form(
-            step_id="set_options",
-            data_schema=schema,
+            step_id="model",
+            data_schema=self.add_suggested_values_to_schema(
+                vol.Schema(step_schema), self.options
+            ),
             errors=errors or None,
+            last_step=True,
         )
+
+    async def _get_model_list(self) -> list[SelectOptionDict]:
+        """Get list of available models."""
+        try:
+            client = await self.hass.async_add_executor_job(
+                partial(
+                    anthropic.AsyncAnthropic,
+                    api_key=self._get_entry().data[CONF_API_KEY],
+                )
+            )
||||||
|
models = (await client.models.list()).data
|
||||||
|
except anthropic.AnthropicError:
|
||||||
|
models = []
|
||||||
|
_LOGGER.debug("Available models: %s", models)
|
||||||
|
model_options: list[SelectOptionDict] = []
|
||||||
|
short_form = re.compile(r"[^\d]-\d$")
|
||||||
|
for model_info in models:
|
||||||
|
# Resolve alias from versioned model name:
|
||||||
|
model_alias = (
|
||||||
|
model_info.id[:-9]
|
||||||
|
if model_info.id
|
||||||
|
not in ("claude-3-haiku-20240307", "claude-3-opus-20240229")
|
||||||
|
else model_info.id
|
||||||
|
)
|
||||||
|
if short_form.search(model_alias):
|
||||||
|
model_alias += "-0"
|
||||||
|
model_options.append(
|
||||||
|
SelectOptionDict(
|
||||||
|
label=model_info.display_name,
|
||||||
|
value=model_alias,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return model_options
|
||||||
|
|
||||||
async def _get_location_data(self) -> dict[str, str]:
|
async def _get_location_data(self) -> dict[str, str]:
|
||||||
"""Get approximate location data of the user."""
|
"""Get approximate location data of the user."""
|
||||||
location_data: dict[str, str] = {}
|
location_data: dict[str, str] = {}
|
||||||
@@ -304,77 +493,3 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
|||||||
_LOGGER.debug("Location data: %s", location_data)
|
_LOGGER.debug("Location data: %s", location_data)
|
||||||
|
|
||||||
return location_data
|
return location_data
|
||||||
|
|
||||||
async_step_user = async_step_set_options
|
|
||||||
async_step_reconfigure = async_step_set_options
|
|
||||||
|
|
||||||
|
|
||||||
def anthropic_config_option_schema(
|
|
||||||
hass: HomeAssistant,
|
|
||||||
is_new: bool,
|
|
||||||
options: Mapping[str, Any],
|
|
||||||
) -> dict:
|
|
||||||
"""Return a schema for Anthropic completion options."""
|
|
||||||
hass_apis: list[SelectOptionDict] = [
|
|
||||||
SelectOptionDict(
|
|
||||||
label=api.name,
|
|
||||||
value=api.id,
|
|
||||||
)
|
|
||||||
for api in llm.async_get_apis(hass)
|
|
||||||
]
|
|
||||||
|
|
||||||
if is_new:
|
|
||||||
schema: dict[vol.Required | vol.Optional, Any] = {
|
|
||||||
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
schema = {}
|
|
||||||
|
|
||||||
schema.update(
|
|
||||||
{
|
|
||||||
vol.Optional(CONF_PROMPT): TemplateSelector(),
|
|
||||||
vol.Optional(
|
|
||||||
CONF_LLM_HASS_API,
|
|
||||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
|
||||||
vol.Required(
|
|
||||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
|
||||||
): bool,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
if options.get(CONF_RECOMMENDED):
|
|
||||||
return schema
|
|
||||||
|
|
||||||
schema.update(
|
|
||||||
{
|
|
||||||
vol.Optional(
|
|
||||||
CONF_CHAT_MODEL,
|
|
||||||
default=RECOMMENDED_CHAT_MODEL,
|
|
||||||
): str,
|
|
||||||
vol.Optional(
|
|
||||||
CONF_MAX_TOKENS,
|
|
||||||
default=RECOMMENDED_MAX_TOKENS,
|
|
||||||
): int,
|
|
||||||
vol.Optional(
|
|
||||||
CONF_TEMPERATURE,
|
|
||||||
default=RECOMMENDED_TEMPERATURE,
|
|
||||||
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
|
|
||||||
vol.Optional(
|
|
||||||
CONF_THINKING_BUDGET,
|
|
||||||
default=RECOMMENDED_THINKING_BUDGET,
|
|
||||||
): int,
|
|
||||||
vol.Optional(
|
|
||||||
CONF_WEB_SEARCH,
|
|
||||||
default=RECOMMENDED_WEB_SEARCH,
|
|
||||||
): bool,
|
|
||||||
vol.Optional(
|
|
||||||
CONF_WEB_SEARCH_MAX_USES,
|
|
||||||
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
|
|
||||||
): int,
|
|
||||||
vol.Optional(
|
|
||||||
CONF_WEB_SEARCH_USER_LOCATION,
|
|
||||||
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
|
||||||
): bool,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return schema
|
|
||||||
|
|||||||
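A note on the `_get_model_list` helper added above: its alias resolution is easy to misread. It strips the nine-character date suffix (e.g. `-20250514`) from every model ID except the two pinned Claude 3 models, then appends `-0` when the trimmed alias ends in a bare major version. A minimal standalone sketch of that logic; the sample IDs are illustrative, not taken from the diff:

import re

# Pinned IDs that keep their date suffix (from the diff above).
PINNED = ("claude-3-haiku-20240307", "claude-3-opus-20240229")
# Matches aliases ending in "-<single digit>", e.g. "claude-sonnet-4".
SHORT_FORM = re.compile(r"[^\d]-\d$")

def resolve_alias(model_id: str) -> str:
    """Mirror the alias resolution used by _get_model_list."""
    alias = model_id[:-9] if model_id not in PINNED else model_id
    if SHORT_FORM.search(alias):
        alias += "-0"  # prefer the "-0" alias over a bare major version
    return alias

# Hypothetical inputs to show the behavior:
assert resolve_alias("claude-sonnet-4-20250514") == "claude-sonnet-4-0"
assert resolve_alias("claude-3-5-sonnet-20241022") == "claude-3-5-sonnet"
assert resolve_alias("claude-3-haiku-20240307") == "claude-3-haiku-20240307"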
@@ -6,6 +6,7 @@ DOMAIN = "anthropic"
 LOGGER = logging.getLogger(__package__)

 DEFAULT_CONVERSATION_NAME = "Claude conversation"
+DEFAULT_AI_TASK_NAME = "Claude AI Task"

 CONF_RECOMMENDED = "recommended"
 CONF_PROMPT = "prompt"
@@ -1,17 +1,24 @@
 """Base entity for Anthropic."""

+import base64
 from collections.abc import AsyncGenerator, Callable, Iterable
 from dataclasses import dataclass, field
 import json
+from mimetypes import guess_file_type
+from pathlib import Path
 from typing import Any

 import anthropic
 from anthropic import AsyncStream
 from anthropic.types import (
+    Base64ImageSourceParam,
+    Base64PDFSourceParam,
     CitationsDelta,
     CitationsWebSearchResultLocation,
     CitationWebSearchResultLocationParam,
     ContentBlockParam,
+    DocumentBlockParam,
+    ImageBlockParam,
     InputJSONDelta,
     MessageDeltaUsage,
     MessageParam,
@@ -37,6 +44,9 @@ from anthropic.types import (
     ThinkingConfigDisabledParam,
     ThinkingConfigEnabledParam,
     ThinkingDelta,
+    ToolChoiceAnyParam,
+    ToolChoiceAutoParam,
+    ToolChoiceToolParam,
     ToolParam,
     ToolResultBlockParam,
     ToolUnionParam,
@@ -50,13 +60,16 @@ from anthropic.types import (
     WebSearchToolResultError,
 )
 from anthropic.types.message_create_params import MessageCreateParamsStreaming
+import voluptuous as vol
 from voluptuous_openapi import convert

 from homeassistant.components import conversation
 from homeassistant.config_entries import ConfigSubentry
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import device_registry as dr, llm
 from homeassistant.helpers.entity import Entity
+from homeassistant.util import slugify

 from . import AnthropicConfigEntry
 from .const import (
@@ -321,6 +334,7 @@ def _convert_content(
 async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
     chat_log: conversation.ChatLog,
     stream: AsyncStream[MessageStreamEvent],
+    output_tool: str | None = None,
 ) -> AsyncGenerator[
     conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
 ]:
@@ -378,9 +392,19 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                     type="tool_use",
                     id=response.content_block.id,
                     name=response.content_block.name,
-                    input="",
+                    input={},
                 )
                 current_tool_args = ""
+                if response.content_block.name == output_tool:
+                    if first_block or content_details.has_content():
+                        if content_details.has_citations():
+                            content_details.delete_empty()
+                            yield {"native": content_details}
+                        content_details = ContentDetails()
+                        content_details.add_citation_detail()
+                        yield {"role": "assistant"}
+                        has_native = False
+                    first_block = False
             elif isinstance(response.content_block, TextBlock):
                 if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
                     first_block
@@ -435,7 +459,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                     type="server_tool_use",
                     id=response.content_block.id,
                     name=response.content_block.name,
-                    input="",
+                    input={},
                 )
                 current_tool_args = ""
             elif isinstance(response.content_block, WebSearchToolResultBlock):
@@ -471,7 +495,16 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
             first_block = True
         elif isinstance(response, RawContentBlockDeltaEvent):
             if isinstance(response.delta, InputJSONDelta):
-                current_tool_args += response.delta.partial_json
+                if (
+                    current_tool_block is not None
+                    and current_tool_block["name"] == output_tool
+                ):
+                    content_details.citation_details[-1].length += len(
+                        response.delta.partial_json
+                    )
+                    yield {"content": response.delta.partial_json}
+                else:
+                    current_tool_args += response.delta.partial_json
             elif isinstance(response.delta, TextDelta):
                 content_details.citation_details[-1].length += len(response.delta.text)
                 yield {"content": response.delta.text}
@@ -490,6 +523,9 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 content_details.add_citation(response.delta.citation)
         elif isinstance(response, RawContentBlockStopEvent):
            if current_tool_block is not None:
+                if current_tool_block["name"] == output_tool:
+                    current_tool_block = None
+                    continue
                 tool_args = json.loads(current_tool_args) if current_tool_args else {}
                 current_tool_block["input"] = tool_args
                 yield {
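The `output_tool` branches added to `_transform_stream` above change how partial JSON is routed: deltas belonging to the designated structured-output tool are streamed straight into the chat log as content, while every other tool keeps accumulating its arguments for a later `tool_use` stop event. A rough standalone sketch of that routing decision; the event shapes are simplified stand-ins, not the real Anthropic stream types:

import asyncio
from collections.abc import AsyncIterator
from typing import Any

# Simplified stand-ins for InputJSONDelta events: (tool_name, partial_json).
EVENTS = [
    ("reply", '{"answer": '),
    ("reply", '"42"}'),
    ("get_weather", '{"city": "Paris"}'),
]

async def transform(output_tool: str) -> AsyncIterator[dict[str, Any]]:
    """Stream the output tool's JSON as content; accumulate the rest."""
    tool_args: dict[str, str] = {}
    for name, partial_json in EVENTS:
        if name == output_tool:
            yield {"content": partial_json}  # forwarded immediately
        else:
            tool_args[name] = tool_args.get(name, "") + partial_json
    yield {"tool_args": tool_args}  # what a tool_use stop event would consume

async def main() -> None:
    async for delta in transform("reply"):
        print(delta)

asyncio.run(main())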
@@ -557,6 +593,8 @@ class AnthropicBaseLLMEntity(Entity):
     async def _async_handle_chat_log(
         self,
         chat_log: conversation.ChatLog,
+        structure_name: str | None = None,
+        structure: vol.Schema | None = None,
     ) -> None:
         """Generate an answer for the chat log."""
         options = self.subentry.data
@@ -613,6 +651,74 @@ class AnthropicBaseLLMEntity(Entity):
             }
             tools.append(web_search)

+        # Handle attachments by adding them to the last user message
+        last_content = chat_log.content[-1]
+        if last_content.role == "user" and last_content.attachments:
+            last_message = messages[-1]
+            if last_message["role"] != "user":
+                raise HomeAssistantError(
+                    "Last message must be a user message to add attachments"
+                )
+            if isinstance(last_message["content"], str):
+                last_message["content"] = [
+                    TextBlockParam(type="text", text=last_message["content"])
+                ]
+            last_message["content"].extend(  # type: ignore[union-attr]
+                await async_prepare_files_for_prompt(
+                    self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
+                )
+            )
+
+        if structure and structure_name:
+            structure_name = slugify(structure_name)
+            if model_args["thinking"]["type"] == "disabled":
+                if not tools:
+                    # Simplest case: no tools and no extended thinking
+                    # Add a tool and force its use
+                    model_args["tool_choice"] = ToolChoiceToolParam(
+                        type="tool",
+                        name=structure_name,
+                    )
+                else:
+                    # Second case: tools present but no extended thinking
+                    # Allow the model to use any tool but not text response
+                    # The model should know to use the right tool by its description
+                    model_args["tool_choice"] = ToolChoiceAnyParam(
+                        type="any",
+                    )
+            else:
+                # Extended thinking is enabled. With extended thinking, we cannot
+                # force tool use or disable text responses, so we add a hint to the
+                # system prompt instead. With extended thinking, the model should be
+                # smart enough to use the tool.
+                model_args["tool_choice"] = ToolChoiceAutoParam(
+                    type="auto",
+                )
+
+                if isinstance(model_args["system"], str):
+                    model_args["system"] = [
+                        TextBlockParam(type="text", text=model_args["system"])
+                    ]
+                model_args["system"].append(  # type: ignore[union-attr]
+                    TextBlockParam(
+                        type="text",
+                        text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
+                    )
+                )
+
+            tools.append(
+                ToolParam(
+                    name=structure_name,
+                    description="Use this tool to reply to the user",
+                    input_schema=convert(
+                        structure,
+                        custom_serializer=chat_log.llm_api.custom_serializer
+                        if chat_log.llm_api
+                        else llm.selector_serializer,
+                    ),
+                )
+            )
+
         if tools:
             model_args["tools"] = tools

@@ -629,7 +735,11 @@ class AnthropicBaseLLMEntity(Entity):
                     content
                     async for content in chat_log.async_add_delta_content_stream(
                         self.entity_id,
-                        _transform_stream(chat_log, stream),
+                        _transform_stream(
+                            chat_log,
+                            stream,
+                            output_tool=structure_name if structure else None,
+                        ),
                     )
                 ]
             )
@@ -641,3 +751,59 @@ class AnthropicBaseLLMEntity(Entity):

             if not chat_log.unresponded_tool_results:
                 break
+
+
+async def async_prepare_files_for_prompt(
+    hass: HomeAssistant, files: list[tuple[Path, str | None]]
+) -> Iterable[ImageBlockParam | DocumentBlockParam]:
+    """Append files to a prompt.
+
+    Caller needs to ensure that the files are allowed.
+    """
+
+    def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
+        content: list[ImageBlockParam | DocumentBlockParam] = []
+
+        for file_path, mime_type in files:
+            if not file_path.exists():
+                raise HomeAssistantError(f"`{file_path}` does not exist")
+
+            if mime_type is None:
+                mime_type = guess_file_type(file_path)[0]
+
+            if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
+                raise HomeAssistantError(
+                    "Only images and PDF are supported by the Anthropic API,"
+                    f"`{file_path}` is not an image file or PDF"
+                )
+            if mime_type == "image/jpg":
+                mime_type = "image/jpeg"
+
+            base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")
+
+            if mime_type.startswith("image/"):
+                content.append(
+                    ImageBlockParam(
+                        type="image",
+                        source=Base64ImageSourceParam(
+                            type="base64",
+                            media_type=mime_type,  # type: ignore[typeddict-item]
+                            data=base64_file,
+                        ),
+                    )
+                )
+            elif mime_type.startswith("application/pdf"):
+                content.append(
+                    DocumentBlockParam(
+                        type="document",
+                        source=Base64PDFSourceParam(
+                            type="base64",
+                            media_type=mime_type,  # type: ignore[typeddict-item]
+                            data=base64_file,
+                        ),
+                    )
+                )
+
+        return content
+
+    return await hass.async_add_executor_job(append_files_to_content)
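For reference, the essence of `async_prepare_files_for_prompt` above, reduced to a synchronous helper that runs standalone (it uses `mimetypes.guess_type` instead of the newer `guess_file_type`). The path in the usage comment is hypothetical; the block shapes follow the Anthropic base64 content-block layout used in the diff:

import base64
from mimetypes import guess_type
from pathlib import Path

def prepare_file(path: Path, mime_type: str | None = None) -> dict:
    """Build a base64 image/document content block for one file."""
    if mime_type is None:
        mime_type, _ = guess_type(path)
    if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
        raise ValueError(f"{path} is not an image or PDF")
    if mime_type == "image/jpg":
        mime_type = "image/jpeg"  # normalize the non-standard alias
    data = base64.b64encode(path.read_bytes()).decode("utf-8")
    block_type = "image" if mime_type.startswith("image/") else "document"
    return {
        "type": block_type,
        "source": {"type": "base64", "media_type": mime_type, "data": data},
    }

# Hypothetical usage:
# blocks = [prepare_file(Path("/tmp/snapshot.jpeg"))]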
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.69.0"]
+  "requirements": ["anthropic==0.73.0"]
 }
@@ -18,43 +18,94 @@
     }
   },
   "config_subentries": {
+    "ai_task_data": {
+      "abort": {
+        "entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
+        "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
+      },
+      "entry_type": "AI task",
+      "initiate_flow": {
+        "reconfigure": "Reconfigure AI task",
+        "user": "Add AI task"
+      },
+      "step": {
+        "advanced": {
+          "data": {
+            "chat_model": "[%key:common::generic::model%]",
+            "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
+            "temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
+          },
+          "title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
+        },
+        "init": {
+          "data": {
+            "name": "[%key:common::config_flow::data::name%]",
+            "recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
+          },
+          "title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
+        },
+        "model": {
+          "data": {
+            "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
+            "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
+            "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
+            "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
+          },
+          "data_description": {
+            "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
+            "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
+            "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
+            "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
+          },
+          "title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
+        }
+      }
+    },
     "conversation": {
       "abort": {
         "entry_not_loaded": "Cannot add things while the configuration is disabled.",
         "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
       },
       "entry_type": "Conversation agent",
+      "error": {
+        "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
+        "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
+      },
       "initiate_flow": {
         "reconfigure": "Reconfigure conversation agent",
         "user": "Add conversation agent"
       },
       "step": {
-        "set_options": {
+        "advanced": {
           "data": {
             "chat_model": "[%key:common::generic::model%]",
-            "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
             "max_tokens": "Maximum tokens to return in response",
+            "temperature": "Temperature"
+          },
+          "title": "Advanced settings"
+        },
+        "init": {
+          "data": {
+            "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
             "name": "[%key:common::config_flow::data::name%]",
             "prompt": "[%key:common::config_flow::data::prompt%]",
-            "recommended": "Recommended model settings",
-            "temperature": "Temperature",
+            "recommended": "Recommended model settings"
+          },
+          "data_description": {
+            "prompt": "Instruct how the LLM should respond. This can be a template."
+          },
+          "title": "Basic settings"
+        },
+        "model": {
+          "data": {
             "thinking_budget": "Thinking budget",
             "user_location": "Include home location",
             "web_search": "Enable web search",
             "web_search_max_uses": "Maximum web searches"
           },
           "data_description": {
-            "prompt": "Instruct how the LLM should respond. This can be a template.",
             "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
             "user_location": "Localize search results based on home location",
             "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
            "web_search_max_uses": "Limit the number of searches performed per response"
-          }
+          },
+          "title": "Model-specific options"
         }
       }
     }
   }
 }
@@ -7,3 +7,26 @@ CONNECTION_TIMEOUT: int = 10

 # Field name of last self test retrieved from apcupsd.
 LAST_S_TEST: Final = "laststest"
+
+# Mapping of deprecated sensor keys (as reported by apcupsd, lower-cased) to their deprecation
+# repair issue translation keys.
+DEPRECATED_SENSORS: Final = {
+    "apc": "apc_deprecated",
+    "end apc": "date_deprecated",
+    "date": "date_deprecated",
+    "apcmodel": "available_via_device_info",
+    "model": "available_via_device_info",
+    "firmware": "available_via_device_info",
+    "version": "available_via_device_info",
+    "upsname": "available_via_device_info",
+    "serialno": "available_via_device_info",
+}
+
+AVAILABLE_VIA_DEVICE_ATTR: Final = {
+    "apcmodel": "model",
+    "model": "model",
+    "firmware": "hw_version",
+    "version": "sw_version",
+    "upsname": "name",
+    "serialno": "serial_number",
+}
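A small sketch (not part of the diff) of how the two maps above combine when the sensor platform builds repair-issue placeholders for a deprecated key; the entity and device IDs are made up:

from typing import Final

DEPRECATED_SENSORS: Final = {"apcmodel": "available_via_device_info"}
AVAILABLE_VIA_DEVICE_ATTR: Final = {"apcmodel": "model"}

def issue_for(key: str, entity_id: str, device_id: str) -> tuple[str, dict[str, str]] | None:
    """Return (translation_key, placeholders) for a deprecated sensor key."""
    reason = DEPRECATED_SENSORS.get(key)
    if reason is None:
        return None
    placeholders = {"entity_id": entity_id}
    if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(key):
        # The repair text points users at device_attr("<device_id>", "<attr>").
        placeholders["available_via_device_attr"] = via_attr
        placeholders["device_id"] = device_id
    return reason, placeholders

print(issue_for("apcmodel", "sensor.myups_model", "abc123"))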
@@ -4,6 +4,8 @@ from __future__ import annotations

 import logging

+from homeassistant.components.automation import automations_with_entity
+from homeassistant.components.script import scripts_with_entity
 from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
@@ -22,9 +24,11 @@ from homeassistant.const import (
     UnitOfTime,
 )
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+import homeassistant.helpers.issue_registry as ir

-from .const import LAST_S_TEST
+from .const import AVAILABLE_VIA_DEVICE_ATTR, DEPRECATED_SENSORS, DOMAIN, LAST_S_TEST
 from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
 from .entity import APCUPSdEntity

@@ -528,3 +532,62 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
         self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
         if not self.native_unit_of_measurement:
             self._attr_native_unit_of_measurement = inferred_unit
+
+    async def async_added_to_hass(self) -> None:
+        """Handle when entity is added to Home Assistant.
+
+        If this is a deprecated sensor entity, create a repair issue to guide
+        the user to disable it.
+        """
+        await super().async_added_to_hass()
+
+        if not self.enabled:
+            return
+
+        reason = DEPRECATED_SENSORS.get(self.entity_description.key)
+        if not reason:
+            return
+
+        automations = automations_with_entity(self.hass, self.entity_id)
+        scripts = scripts_with_entity(self.hass, self.entity_id)
+        if not automations and not scripts:
+            return
+
+        entity_registry = er.async_get(self.hass)
+        items = [
+            f"- [{entry.name or entry.original_name or entity_id}]"
+            f"(/config/{integration}/edit/{entry.unique_id or entity_id.split('.', 1)[-1]})"
+            for integration, entities in (
+                ("automation", automations),
+                ("script", scripts),
+            )
+            for entity_id in entities
+            if (entry := entity_registry.async_get(entity_id))
+        ]
+        placeholders = {
+            "entity_name": str(self.name or self.entity_id),
+            "entity_id": self.entity_id,
+            "items": "\n".join(items),
+        }
+        if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(self.entity_description.key):
+            placeholders["available_via_device_attr"] = via_attr
+        if device_entry := self.device_entry:
+            placeholders["device_id"] = device_entry.id
+
+        ir.async_create_issue(
+            self.hass,
+            DOMAIN,
+            f"{reason}_{self.entity_id}",
+            breaks_in_ha_version="2026.6.0",
+            is_fixable=False,
+            severity=ir.IssueSeverity.WARNING,
+            translation_key=reason,
+            translation_placeholders=placeholders,
+        )
+
+    async def async_will_remove_from_hass(self) -> None:
+        """Handle when entity will be removed from Home Assistant."""
+        await super().async_will_remove_from_hass()
+
+        if issue_key := DEPRECATED_SENSORS.get(self.entity_description.key):
+            ir.async_delete_issue(self.hass, DOMAIN, f"{issue_key}_{self.entity_id}")
@@ -241,5 +241,19 @@
     "cannot_connect": {
       "message": "Cannot connect to APC UPS Daemon."
     }
+  },
+  "issues": {
+    "apc_deprecated": {
+      "description": "The {entity_name} sensor (`{entity_id}`) is deprecated because it exposes internal details of the APC UPS Daemon response.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use supported APC UPS entities instead. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
+      "title": "{entity_name} sensor is deprecated"
+    },
+    "available_via_device_info": {
+      "description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the same value is available from the device registry via `device_attr(\"{device_id}\", \"{available_via_device_attr}\")`.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use the `device_attr` helper instead of this sensor. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
+      "title": "{entity_name} sensor is deprecated"
+    },
+    "date_deprecated": {
+      "description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the timestamp is already available from other APC UPS sensors via their last updated time.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to reference any entity's `last_updated` attribute instead (for example, `states.binary_sensor.apcups_online_status.last_updated`). Reload the APC UPS Daemon integration afterwards to resolve this issue.",
+      "title": "{entity_name} sensor is deprecated"
+    }
   }
 }
@@ -111,8 +111,6 @@ def handle_errors_and_zip[_AsusWrtBridgeT: AsusWrtBridge](

         if isinstance(data, dict):
             return dict(zip(keys, list(data.values()), strict=False))
-        if not isinstance(data, (list, tuple)):
-            raise UpdateFailed("Received invalid data type")
         return dict(zip(keys, data, strict=False))

     return _wrapper
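With the type guard gone, `_wrapper` above leans entirely on `zip(..., strict=False)`, which silently truncates on length mismatch instead of raising. A quick illustration with made-up keys and payloads:

keys = ["download", "upload", "total"]

# dict payload: values are zipped positionally against the keys
payload = {"rx": 10, "tx": 20}
print(dict(zip(keys, list(payload.values()), strict=False)))
# -> {'download': 10, 'upload': 20}

# sequence payload shorter than keys: strict=False truncates quietly
print(dict(zip(keys, [1, 2], strict=False)))
# -> {'download': 1, 'upload': 2}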
@@ -14,10 +14,11 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import (
-    config_entry_oauth2_flow,
-    device_registry as dr,
-    issue_registry as ir,
+from homeassistant.helpers import device_registry as dr, issue_registry as ir
+from homeassistant.helpers.config_entry_oauth2_flow import (
+    ImplementationUnavailableError,
+    OAuth2Session,
+    async_get_config_entry_implementation,
 )

 from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS
@@ -37,14 +38,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo

     session = async_create_august_clientsession(hass)
     try:
-        implementation = (
-            await config_entry_oauth2_flow.async_get_config_entry_implementation(
-                hass, entry
-            )
-        )
-    except ValueError as err:
+        implementation = await async_get_config_entry_implementation(hass, entry)
+    except ImplementationUnavailableError as err:
         raise ConfigEntryNotReady("OAuth implementation not available") from err
-    oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
+    oauth_session = OAuth2Session(hass, entry, implementation)
     august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
     try:
         await async_setup_august(hass, entry, august_gateway)
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["avea"],
   "quality_scale": "legacy",
-  "requirements": ["avea==1.5.1"]
+  "requirements": ["avea==1.6.1"]
 }
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/awair",
   "iot_class": "local_polling",
   "loggers": ["python_awair"],
-  "requirements": ["python-awair==0.2.4"],
+  "requirements": ["python-awair==0.2.5"],
   "zeroconf": [
     {
       "name": "awair*",
homeassistant/components/backblaze_b2/__init__.py (new file, 116 lines)
@@ -0,0 +1,116 @@
"""The Backblaze B2 integration."""

from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from b2sdk.v2 import B2Api, Bucket, InMemoryAccountInfo, exception

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.event import async_track_time_interval

from .const import (
    BACKBLAZE_REALM,
    CONF_APPLICATION_KEY,
    CONF_BUCKET,
    CONF_KEY_ID,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
)
from .repairs import (
    async_check_for_repair_issues,
    create_bucket_access_restricted_issue,
    create_bucket_not_found_issue,
)

_LOGGER = logging.getLogger(__name__)

type BackblazeConfigEntry = ConfigEntry[Bucket]


async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
    """Set up Backblaze B2 from a config entry."""

    info = InMemoryAccountInfo()
    b2_api = B2Api(info)

    def _authorize_and_get_bucket_sync() -> Bucket:
        """Synchronously authorize the Backblaze B2 account and retrieve the bucket.

        This function runs in the event loop's executor as b2sdk operations are blocking.
        """
        b2_api.authorize_account(
            BACKBLAZE_REALM,
            entry.data[CONF_KEY_ID],
            entry.data[CONF_APPLICATION_KEY],
        )
        return b2_api.get_bucket_by_name(entry.data[CONF_BUCKET])

    try:
        bucket = await hass.async_add_executor_job(_authorize_and_get_bucket_sync)
    except exception.Unauthorized as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_credentials",
        ) from err
    except exception.RestrictedBucket as err:
        create_bucket_access_restricted_issue(hass, entry, err.bucket_name)
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="restricted_bucket",
            translation_placeholders={
                "restricted_bucket_name": err.bucket_name,
            },
        ) from err
    except exception.NonExistentBucket as err:
        create_bucket_not_found_issue(hass, entry, entry.data[CONF_BUCKET])
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="invalid_bucket_name",
        ) from err
    except exception.ConnectionReset as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
        ) from err
    except exception.MissingAccountData as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
        ) from err

    entry.runtime_data = bucket

    def _async_notify_backup_listeners() -> None:
        """Notify any registered backup agent listeners."""
        _LOGGER.debug("Notifying backup listeners for entry %s", entry.entry_id)
        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
            listener()

    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))

    async def _periodic_issue_check(_now: Any) -> None:
        """Periodically check for repair issues."""
        await async_check_for_repair_issues(hass, entry)

    entry.async_on_unload(
        async_track_time_interval(hass, _periodic_issue_check, timedelta(minutes=30))
    )

    hass.async_create_task(async_check_for_repair_issues(hass, entry))

    return True


async def async_unload_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
    """Unload a Backblaze B2 config entry.

    Any resources directly managed by this entry that need explicit shutdown
    would be handled here. In this case, the `async_on_state_change` listener
    handles the notification logic on unload.
    """
    return True
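The listener bookkeeping used above (callbacks parked in `hass.data`, notified whenever the entry changes state) is the standard backup-agent pattern. A stripped-down sketch of the same idea outside Home Assistant; all names here are illustrative:

from collections.abc import Callable

listeners: list[Callable[[], None]] = []

def register(listener: Callable[[], None]) -> Callable[[], None]:
    """Register a callback and return its unregister function."""
    listeners.append(listener)

    def remove() -> None:
        listeners.remove(listener)

    return remove

def notify_all() -> None:
    """What _async_notify_backup_listeners does for the entry's listeners."""
    for listener in list(listeners):
        listener()

unsub = register(lambda: print("backup agents changed"))
notify_all()  # prints once
unsub()
notify_all()  # prints nothing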
homeassistant/components/backblaze_b2/backup.py (new file, 615 lines)
@@ -0,0 +1,615 @@
|
|||||||
|
"""Backup platform for the Backblaze B2 integration."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import mimetypes
|
||||||
|
from time import time
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from b2sdk.v2 import FileVersion
|
||||||
|
from b2sdk.v2.exception import B2Error
|
||||||
|
|
||||||
|
from homeassistant.components.backup import (
|
||||||
|
AgentBackup,
|
||||||
|
BackupAgent,
|
||||||
|
BackupAgentError,
|
||||||
|
BackupNotFound,
|
||||||
|
suggested_filename,
|
||||||
|
)
|
||||||
|
from homeassistant.core import HomeAssistant, callback
|
||||||
|
from homeassistant.util.async_iterator import AsyncIteratorReader
|
||||||
|
|
||||||
|
from . import BackblazeConfigEntry
|
||||||
|
from .const import (
|
||||||
|
CONF_PREFIX,
|
||||||
|
DATA_BACKUP_AGENT_LISTENERS,
|
||||||
|
DOMAIN,
|
||||||
|
METADATA_FILE_SUFFIX,
|
||||||
|
METADATA_VERSION,
|
||||||
|
)
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Cache TTL for backup list (in seconds)
|
||||||
|
CACHE_TTL = 300
|
||||||
|
|
||||||
|
|
||||||
|
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
|
||||||
|
"""Return the suggested filenames for the backup and metadata files."""
|
||||||
|
base_name = suggested_filename(backup).rsplit(".", 1)[0]
|
||||||
|
return f"{base_name}.tar", f"{base_name}.metadata.json"
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_metadata(raw_content: str) -> dict[str, Any]:
|
||||||
|
"""Parse metadata content from JSON."""
|
||||||
|
try:
|
||||||
|
data = json.loads(raw_content)
|
||||||
|
except json.JSONDecodeError as err:
|
||||||
|
raise ValueError(f"Invalid JSON format: {err}") from err
|
||||||
|
else:
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
raise TypeError("JSON content is not a dictionary")
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
def _find_backup_file_for_metadata(
|
||||||
|
metadata_filename: str, all_files: dict[str, FileVersion], prefix: str
|
||||||
|
) -> FileVersion | None:
|
||||||
|
"""Find corresponding backup file for metadata file."""
|
||||||
|
base_name = metadata_filename[len(prefix) :].removesuffix(METADATA_FILE_SUFFIX)
|
||||||
|
return next(
|
||||||
|
(
|
||||||
|
file
|
||||||
|
for name, file in all_files.items()
|
||||||
|
if name.startswith(prefix + base_name)
|
||||||
|
and name.endswith(".tar")
|
||||||
|
and name != metadata_filename
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_backup_from_metadata(
|
||||||
|
metadata_content: dict[str, Any], backup_file: FileVersion
|
||||||
|
) -> AgentBackup:
|
||||||
|
"""Construct an AgentBackup from parsed metadata content and the associated backup file."""
|
||||||
|
metadata = metadata_content["backup_metadata"]
|
||||||
|
metadata["size"] = backup_file.size
|
||||||
|
return AgentBackup.from_dict(metadata)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_b2_errors[T](
|
||||||
|
func: Callable[..., Coroutine[Any, Any, T]],
|
||||||
|
) -> Callable[..., Coroutine[Any, Any, T]]:
|
||||||
|
"""Handle B2Errors by converting them to BackupAgentError."""
|
||||||
|
|
||||||
|
@functools.wraps(func)
|
||||||
|
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||||
|
"""Catch B2Error and raise BackupAgentError."""
|
||||||
|
try:
|
||||||
|
return await func(*args, **kwargs)
|
||||||
|
except B2Error as err:
|
||||||
|
error_msg = f"Failed during {func.__name__}"
|
||||||
|
raise BackupAgentError(error_msg) from err
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
async def async_get_backup_agents(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
) -> list[BackupAgent]:
|
||||||
|
"""Return a list of backup agents for all configured Backblaze B2 entries."""
|
||||||
|
entries: list[BackblazeConfigEntry] = hass.config_entries.async_loaded_entries(
|
||||||
|
DOMAIN
|
||||||
|
)
|
||||||
|
return [BackblazeBackupAgent(hass, entry) for entry in entries]
|
||||||
|
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def async_register_backup_agents_listener(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
*,
|
||||||
|
listener: Callable[[], None],
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> Callable[[], None]:
|
||||||
|
"""Register a listener to be called when backup agents are added or removed.
|
||||||
|
|
||||||
|
:return: A function to unregister the listener.
|
||||||
|
"""
|
||||||
|
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def remove_listener() -> None:
|
||||||
|
"""Remove the listener."""
|
||||||
|
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
|
||||||
|
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
|
||||||
|
hass.data.pop(DATA_BACKUP_AGENT_LISTENERS, None)
|
||||||
|
|
||||||
|
return remove_listener
|
||||||
|
|
||||||
|
|
||||||
|
class BackblazeBackupAgent(BackupAgent):
|
||||||
|
"""Backup agent for Backblaze B2 cloud storage."""
|
||||||
|
|
||||||
|
domain = DOMAIN
|
||||||
|
|
||||||
|
def __init__(self, hass: HomeAssistant, entry: BackblazeConfigEntry) -> None:
|
||||||
|
"""Initialize the Backblaze B2 agent."""
|
||||||
|
super().__init__()
|
||||||
|
self._hass = hass
|
||||||
|
self._bucket = entry.runtime_data
|
||||||
|
self._prefix = entry.data[CONF_PREFIX]
|
||||||
|
|
||||||
|
self.name = entry.title
|
||||||
|
self.unique_id = entry.entry_id
|
||||||
|
|
||||||
|
self._all_files_cache: dict[str, FileVersion] = {}
|
||||||
|
self._all_files_cache_expiration: float = 0.0
|
||||||
|
self._backup_list_cache: dict[str, AgentBackup] = {}
|
||||||
|
self._backup_list_cache_expiration: float = 0.0
|
||||||
|
|
||||||
|
self._all_files_cache_lock = asyncio.Lock()
|
||||||
|
self._backup_list_cache_lock = asyncio.Lock()
|
||||||
|
|
||||||
|
def _is_cache_valid(self, expiration_time: float) -> bool:
|
||||||
|
"""Check if cache is still valid based on expiration time."""
|
||||||
|
return time() <= expiration_time
|
||||||
|
|
||||||
|
async def _cleanup_failed_upload(self, filename: str) -> None:
|
||||||
|
"""Clean up a partially uploaded file after upload failure."""
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Attempting to delete partially uploaded main backup file %s "
|
||||||
|
"due to metadata upload failure",
|
||||||
|
filename,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
uploaded_main_file_info = await self._hass.async_add_executor_job(
|
||||||
|
self._bucket.get_file_info_by_name, filename
|
||||||
|
)
|
||||||
|
await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
|
||||||
|
except B2Error:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Failed to clean up partially uploaded main backup file %s. "
|
||||||
|
"Manual intervention may be required to delete it from Backblaze B2",
|
||||||
|
filename,
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Successfully deleted partially uploaded main backup file %s", filename
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _get_file_for_download(self, backup_id: str) -> FileVersion:
|
||||||
|
"""Get backup file for download, raising if not found."""
|
||||||
|
file, _ = await self._find_file_and_metadata_version_by_id(backup_id)
|
||||||
|
if not file:
|
||||||
|
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||||
|
return file
|
||||||
|
|
||||||
|
@handle_b2_errors
|
||||||
|
async def async_download_backup(
|
||||||
|
self, backup_id: str, **kwargs: Any
|
||||||
|
) -> AsyncIterator[bytes]:
|
||||||
|
"""Download a backup from Backblaze B2."""
|
||||||
|
file = await self._get_file_for_download(backup_id)
|
||||||
|
_LOGGER.debug("Downloading %s", file.file_name)
|
||||||
|
|
||||||
|
downloaded_file = await self._hass.async_add_executor_job(file.download)
|
||||||
|
response = downloaded_file.response
|
||||||
|
|
||||||
|
async def stream_response() -> AsyncIterator[bytes]:
|
||||||
|
"""Stream the response into an AsyncIterator."""
|
||||||
|
try:
|
||||||
|
iterator = response.iter_content(chunk_size=1024 * 1024)
|
||||||
|
while True:
|
||||||
|
chunk = await self._hass.async_add_executor_job(
|
||||||
|
next, iterator, None
|
||||||
|
)
|
||||||
|
if chunk is None:
|
||||||
|
break
|
||||||
|
yield chunk
|
||||||
|
finally:
|
||||||
|
_LOGGER.debug("Finished streaming download for %s", file.file_name)
|
||||||
|
|
||||||
|
return stream_response()
|
||||||
|
|
||||||
|
@handle_b2_errors
|
||||||
|
async def async_upload_backup(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||||
|
backup: AgentBackup,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> None:
|
||||||
|
"""Upload a backup to Backblaze B2.
|
||||||
|
|
||||||
|
This involves uploading the main backup archive and a separate metadata JSON file.
|
||||||
|
"""
|
||||||
|
tar_filename, metadata_filename = suggested_filenames(backup)
|
||||||
|
prefixed_tar_filename = self._prefix + tar_filename
|
||||||
|
prefixed_metadata_filename = self._prefix + metadata_filename
|
||||||
|
|
||||||
|
metadata_content_bytes = json.dumps(
|
||||||
|
{
|
||||||
|
"metadata_version": METADATA_VERSION,
|
||||||
|
"backup_id": backup.backup_id,
|
||||||
|
"backup_metadata": backup.as_dict(),
|
||||||
|
}
|
||||||
|
).encode("utf-8")
|
||||||
|
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Uploading backup: %s, and metadata: %s",
|
||||||
|
prefixed_tar_filename,
|
||||||
|
prefixed_metadata_filename,
|
||||||
|
)
|
        upload_successful = False
        try:
            await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
            _LOGGER.debug(
                "Main backup file upload finished for %s", prefixed_tar_filename
            )

            _LOGGER.debug("Uploading metadata file: %s", prefixed_metadata_filename)
            await self._upload_metadata_file(
                metadata_content_bytes, prefixed_metadata_filename
            )
            _LOGGER.debug(
                "Metadata file upload finished for %s", prefixed_metadata_filename
            )
            upload_successful = True
        finally:
            if upload_successful:
                _LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
                self._invalidate_caches(
                    backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
                )
            else:
                await self._cleanup_failed_upload(prefixed_tar_filename)

    def _upload_metadata_file_sync(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Synchronously upload metadata file to B2."""
        self._bucket.upload_bytes(
            metadata_content,
            filename,
            content_type="application/json",
            file_info={"metadata_only": "true"},
        )

    async def _upload_metadata_file(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Upload metadata file to B2."""
        await self._hass.async_add_executor_job(
            self._upload_metadata_file_sync,
            metadata_content,
            filename,
        )

    def _upload_unbound_stream_sync(
        self,
        reader: AsyncIteratorReader,
        filename: str,
        content_type: str,
        file_info: dict[str, Any],
    ) -> FileVersion:
        """Synchronously upload unbound stream to B2."""
        return self._bucket.upload_unbound_stream(
            reader,
            filename,
            content_type=content_type,
            file_info=file_info,
        )

    def _download_and_parse_metadata_sync(
        self, metadata_file_version: FileVersion
    ) -> dict[str, Any]:
        """Synchronously download and parse metadata file."""
        return _parse_metadata(
            metadata_file_version.download().response.content.decode("utf-8")
        )

    async def _upload_backup_file(
        self,
        filename: str,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        file_info: dict[str, Any],
    ) -> None:
        """Upload backup file to B2 using streaming."""
        _LOGGER.debug("Starting streaming upload for %s", filename)

        stream = await open_stream()
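        # AsyncIteratorReader bridges the async byte stream to a blocking,
        # file-like reader, letting the synchronous b2sdk upload consume it
        # from an executor thread without blocking the event loop.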
        reader = AsyncIteratorReader(self._hass.loop, stream)

        _LOGGER.debug("Uploading backup file %s with streaming", filename)
        try:
            content_type, _ = mimetypes.guess_type(filename)
            file_version = await self._hass.async_add_executor_job(
                self._upload_unbound_stream_sync,
                reader,
                filename,
                content_type or "application/x-tar",
                file_info,
            )
        finally:
            reader.close()

        _LOGGER.debug("Successfully uploaded %s (ID: %s)", filename, file_version.id_)

    @handle_b2_errors
    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Delete a backup and its associated metadata file from Backblaze B2."""
        file, metadata_file = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file:
            raise BackupNotFound(f"Backup {backup_id} not found")

        # Invariant: when file is not None, metadata_file is also not None
        assert metadata_file is not None

        _LOGGER.debug(
            "Deleting backup file: %s and metadata file: %s",
            file.file_name,
            metadata_file.file_name,
        )

        await self._hass.async_add_executor_job(file.delete)
        await self._hass.async_add_executor_job(metadata_file.delete)

        self._invalidate_caches(
            backup_id,
            file.file_name,
            metadata_file.file_name,
            remove_files=True,
        )

    @handle_b2_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List all backups by finding their associated metadata files in Backblaze B2."""
        async with self._backup_list_cache_lock:
            if self._backup_list_cache and self._is_cache_valid(
                self._backup_list_cache_expiration
            ):
                _LOGGER.debug("Returning backups from cache")
                return list(self._backup_list_cache.values())

            _LOGGER.debug(
                "Cache expired or empty, fetching all files from B2 to build backup list"
            )
            all_files_in_prefix = await self._get_all_files_in_prefix()

            _LOGGER.debug(
                "Files found in prefix '%s': %s",
                self._prefix,
                list(all_files_in_prefix.keys()),
            )

            # Process metadata files sequentially to avoid exhausting executor pool
            backups = {}
            for file_name, file_version in all_files_in_prefix.items():
                if file_name.endswith(METADATA_FILE_SUFFIX):
                    backup = await self._hass.async_add_executor_job(
                        self._process_metadata_file_sync,
                        file_name,
                        file_version,
                        all_files_in_prefix,
                    )
                    if backup:
                        backups[backup.backup_id] = backup
            self._backup_list_cache = backups
            self._backup_list_cache_expiration = time() + CACHE_TTL

            return list(backups.values())

    @handle_b2_errors
    async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
        """Get a specific backup by its ID from Backblaze B2."""
        if self._backup_list_cache and self._is_cache_valid(
            self._backup_list_cache_expiration
        ):
            if backup := self._backup_list_cache.get(backup_id):
                _LOGGER.debug("Returning backup %s from cache", backup_id)
                return backup

        file, metadata_file_version = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file or not metadata_file_version:
            raise BackupNotFound(f"Backup {backup_id} not found")

        metadata_content = await self._hass.async_add_executor_job(
            self._download_and_parse_metadata_sync,
            metadata_file_version,
        )

        _LOGGER.debug(
            "Successfully retrieved metadata for backup ID %s from file %s",
            backup_id,
            metadata_file_version.file_name,
        )
        backup = _create_backup_from_metadata(metadata_content, file)

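        # Only write back into a cache that is still fresh; repopulating an
        # expired cache here would resurrect stale entries.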
        if self._is_cache_valid(self._backup_list_cache_expiration):
            self._backup_list_cache[backup.backup_id] = backup

        return backup

    async def _find_file_and_metadata_version_by_id(
        self, backup_id: str
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Find the main backup file and its associated metadata file version by backup ID."""
        all_files_in_prefix = await self._get_all_files_in_prefix()

        # Process metadata files sequentially to avoid exhausting executor pool
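        # The backup ID lives inside the metadata JSON, which B2 cannot query
        # server-side, so each metadata file is downloaded and inspected until
        # one matches the requested ID.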
        for file_name, file_version in all_files_in_prefix.items():
            if file_name.endswith(METADATA_FILE_SUFFIX):
                (
                    result_backup_file,
                    result_metadata_file_version,
                ) = await self._hass.async_add_executor_job(
                    self._process_metadata_file_for_id_sync,
                    file_name,
                    file_version,
                    backup_id,
                    all_files_in_prefix,
                )
                if result_backup_file and result_metadata_file_version:
                    return result_backup_file, result_metadata_file_version

        _LOGGER.debug("Backup %s not found", backup_id)
        return None, None

    def _process_metadata_file_for_id_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        target_backup_id: str,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Synchronously process a single metadata file for a specific backup ID.

        Called within a thread pool executor.
        """
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning(
                "Failed to download metadata file %s during ID search: %s",
                file_name,
                err,
            )
            return None, None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None, None

        if metadata_content["backup_id"] != target_backup_id:
            _LOGGER.debug(
                "Metadata file %s does not match target backup ID %s",
                file_name,
                target_backup_id,
            )
            return None, None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s for backup ID %s, but no corresponding backup file",
                file_name,
                target_backup_id,
            )
            return None, None

        _LOGGER.debug(
            "Found backup file %s and metadata file %s for ID %s",
            found_backup_file.file_name,
            file_name,
            target_backup_id,
        )
        return found_backup_file, file_version

    async def _get_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Get all file versions in the configured prefix from Backblaze B2.

        Uses a cache to minimize API calls.

        This fetches a flat list of all files, including main backups and metadata files.
        """
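        # The lock serializes refreshes; re-checking cache validity inside it
        # means concurrent callers wait for the first refresh instead of each
        # hitting the B2 API.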
        async with self._all_files_cache_lock:
            if self._is_cache_valid(self._all_files_cache_expiration):
                _LOGGER.debug("Returning all files from cache")
                return self._all_files_cache

            _LOGGER.debug("Cache for all files expired or empty, fetching from B2")
            all_files_in_prefix = await self._hass.async_add_executor_job(
                self._fetch_all_files_in_prefix
            )
            self._all_files_cache = all_files_in_prefix
            self._all_files_cache_expiration = time() + CACHE_TTL
            return all_files_in_prefix

    def _fetch_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Fetch all files in the configured prefix from B2."""
        all_files: dict[str, FileVersion] = {}
        for file, _ in self._bucket.ls(self._prefix):
            all_files[file.file_name] = file
        return all_files

    def _process_metadata_file_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> AgentBackup | None:
        """Synchronously process a single metadata file and return an AgentBackup if valid."""
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning("Failed to download metadata file %s: %s", file_name, err)
            return None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s but no corresponding backup file",
                file_name,
            )
            return None

        _LOGGER.debug(
            "Successfully processed metadata file %s for backup ID %s",
            file_name,
            metadata_content["backup_id"],
        )
        return _create_backup_from_metadata(metadata_content, found_backup_file)

    def _invalidate_caches(
        self,
        backup_id: str,
        tar_filename: str,
        metadata_filename: str | None,
        *,
        remove_files: bool = False,
    ) -> None:
        """Invalidate caches after upload/deletion operations.

        Args:
            backup_id: The backup ID to remove from the backup cache.
            tar_filename: The tar filename to remove from the files cache.
            metadata_filename: The metadata filename to remove from the files cache.
            remove_files: If True, remove specific files from the cache; if False,
                expire the entire cache.

        """
        if remove_files:
            if self._is_cache_valid(self._all_files_cache_expiration):
                self._all_files_cache.pop(tar_filename, None)
                if metadata_filename:
                    self._all_files_cache.pop(metadata_filename, None)

            if self._is_cache_valid(self._backup_list_cache_expiration):
                self._backup_list_cache.pop(backup_id, None)
        else:
            # For uploads, we can't easily add new FileVersion objects without
            # API calls, so we expire the entire cache for simplicity
            self._all_files_cache_expiration = 0.0
            self._backup_list_cache_expiration = 0.0
homeassistant/components/backblaze_b2/config_flow.py (new file, 288 lines)
@@ -0,0 +1,288 @@
"""Config flow for the Backblaze B2 integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import (
    BACKBLAZE_REALM,
    CONF_APPLICATION_KEY,
    CONF_BUCKET,
    CONF_KEY_ID,
    CONF_PREFIX,
    DOMAIN,
)

_LOGGER = logging.getLogger(__name__)

# Constants
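# Minimum application key capabilities the backup agent needs to create,
# list, download, and delete backup and metadata files in the bucket.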
REQUIRED_CAPABILITIES = {"writeFiles", "listFiles", "deleteFiles", "readFiles"}

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_KEY_ID): cv.string,
        vol.Required(CONF_APPLICATION_KEY): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
        vol.Required(CONF_BUCKET): cv.string,
        vol.Optional(CONF_PREFIX, default=""): cv.string,
    }
)


class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Backblaze B2."""

    VERSION = 1

    reauth_entry: ConfigEntry[Any] | None

    def _abort_if_duplicate_credentials(self, user_input: dict[str, Any]) -> None:
        """Abort if credentials already exist in another entry."""
        self._async_abort_entries_match(
            {
                CONF_KEY_ID: user_input[CONF_KEY_ID],
                CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
            }
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user."""
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            errors, placeholders = await self._async_validate_backblaze_connection(
                user_input
            )

            if not errors:
                if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
                    "/"
                ):
                    user_input[CONF_PREFIX] += "/"

                return self.async_create_entry(
                    title=user_input[CONF_BUCKET], data=user_input
                )

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input
            ),
            errors=errors,
            description_placeholders={"brand_name": "Backblaze B2", **placeholders},
        )

    async def _async_validate_backblaze_connection(
        self, user_input: dict[str, Any]
    ) -> tuple[dict[str, str], dict[str, str]]:
        """Validate Backblaze B2 credentials, bucket, capabilities, and prefix.

        Returns a tuple of (errors_dict, placeholders_dict).
        """
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        info = InMemoryAccountInfo()
        b2_api = B2Api(info)
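        # InMemoryAccountInfo keeps the authorization state in memory only,
        # so nothing from this validation attempt is persisted to disk.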

        def _authorize_and_get_bucket_sync() -> None:
            """Synchronously authorize the account and get the bucket by name.

            This function is run in the executor because b2sdk operations are blocking.
            """
            b2_api.authorize_account(
                BACKBLAZE_REALM,  # Use the defined realm constant
                user_input[CONF_KEY_ID],
                user_input[CONF_APPLICATION_KEY],
            )
            b2_api.get_bucket_by_name(user_input[CONF_BUCKET])

        try:
            await self.hass.async_add_executor_job(_authorize_and_get_bucket_sync)

            allowed = b2_api.account_info.get_allowed()

            # Check if allowed info is available
            if allowed is None or not allowed.get("capabilities"):
                errors["base"] = "invalid_capability"
                placeholders["missing_capabilities"] = ", ".join(
                    sorted(REQUIRED_CAPABILITIES)
                )
            else:
                # Check if all required capabilities are present
                current_caps = set(allowed["capabilities"])
                if not REQUIRED_CAPABILITIES.issubset(current_caps):
                    missing_caps = REQUIRED_CAPABILITIES - current_caps
                    _LOGGER.warning(
                        "Missing required Backblaze B2 capabilities for Key ID '%s': %s",
                        user_input[CONF_KEY_ID],
                        ", ".join(sorted(missing_caps)),
                    )
                    errors["base"] = "invalid_capability"
                    placeholders["missing_capabilities"] = ", ".join(
                        sorted(missing_caps)
                    )
                else:
                    # Only check prefix if capabilities are valid
                    configured_prefix: str = user_input[CONF_PREFIX]
                    allowed_prefix = allowed.get("namePrefix") or ""
                    # Ensure configured prefix starts with Backblaze B2's allowed prefix
                    if allowed_prefix and not configured_prefix.startswith(
                        allowed_prefix
                    ):
                        errors[CONF_PREFIX] = "invalid_prefix"
                        placeholders["allowed_prefix"] = allowed_prefix

        except exception.Unauthorized:
            _LOGGER.debug(
                "Backblaze B2 authentication failed for Key ID '%s'",
                user_input[CONF_KEY_ID],
            )
            errors["base"] = "invalid_credentials"
        except exception.RestrictedBucket as err:
            _LOGGER.debug(
                "Access to Backblaze B2 bucket '%s' is restricted: %s",
                user_input[CONF_BUCKET],
                err,
            )
            placeholders["restricted_bucket_name"] = err.bucket_name
            errors[CONF_BUCKET] = "restricted_bucket"
        except exception.NonExistentBucket:
            _LOGGER.debug(
                "Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
            )
            errors[CONF_BUCKET] = "invalid_bucket_name"
        except exception.ConnectionReset:
            _LOGGER.error("Failed to connect to Backblaze B2. Connection reset")
            errors["base"] = "cannot_connect"
        except exception.MissingAccountData:
            # This generally indicates an issue with how InMemoryAccountInfo is used
            _LOGGER.error(
                "Missing account data during Backblaze B2 authorization for Key ID '%s'",
                user_input[CONF_KEY_ID],
            )
            errors["base"] = "invalid_credentials"
        except Exception:
            _LOGGER.exception(
                "An unexpected error occurred during Backblaze B2 configuration for Key ID '%s'",
                user_input[CONF_KEY_ID],
            )
            errors["base"] = "unknown"

        return errors, placeholders

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle reauthentication flow."""
        self.reauth_entry = self.hass.config_entries.async_get_entry(
            self.context["entry_id"]
        )
        assert self.reauth_entry is not None
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication."""
        assert self.reauth_entry is not None
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            validation_input = {
                CONF_KEY_ID: user_input[CONF_KEY_ID],
                CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
                CONF_BUCKET: self.reauth_entry.data[CONF_BUCKET],
                CONF_PREFIX: self.reauth_entry.data[CONF_PREFIX],
            }

            errors, placeholders = await self._async_validate_backblaze_connection(
                validation_input
            )

            if not errors:
                return self.async_update_reload_and_abort(
                    self.reauth_entry,
                    data_updates={
                        CONF_KEY_ID: user_input[CONF_KEY_ID],
                        CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
                    },
                )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_KEY_ID): cv.string,
                    vol.Required(CONF_APPLICATION_KEY): TextSelector(
                        config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
                    ),
                }
            ),
            errors=errors,
            description_placeholders={
                "brand_name": "Backblaze B2",
                "bucket": self.reauth_entry.data[CONF_BUCKET],
                **placeholders,
            },
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reconfiguration flow."""
        entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
        assert entry is not None

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            errors, placeholders = await self._async_validate_backblaze_connection(
                user_input
            )

            if not errors:
                if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
                    "/"
                ):
                    user_input[CONF_PREFIX] += "/"

                return self.async_update_reload_and_abort(
                    entry,
                    data_updates=user_input,
                )
        else:
            errors = {}
            placeholders = {}

        return self.async_show_form(
            step_id="reconfigure",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input or entry.data
            ),
            errors=errors,
            description_placeholders={"brand_name": "Backblaze B2", **placeholders},
        )
homeassistant/components/backblaze_b2/const.py (new file, 22 lines)
@@ -0,0 +1,22 @@
"""Constants for the Backblaze B2 integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "backblaze_b2"

CONF_KEY_ID = "key_id"
CONF_APPLICATION_KEY = "application_key"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)

METADATA_FILE_SUFFIX = ".metadata.json"
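# Version stamped into every uploaded metadata file by the backup agent,
# presumably so older files remain recognizable if the format changes.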
METADATA_VERSION = "1"

BACKBLAZE_REALM = "production"
homeassistant/components/backblaze_b2/diagnostics.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""Diagnostics support for Backblaze B2."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant

from . import BackblazeConfigEntry
from .const import CONF_APPLICATION_KEY, CONF_KEY_ID

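# The "allowed" account data echoes bucket IDs, bucket names, and the key's
# name prefix, so those fields are redacted along with the raw credentials.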
TO_REDACT_ENTRY_DATA = {CONF_APPLICATION_KEY, CONF_KEY_ID}
TO_REDACT_ACCOUNT_DATA_ALLOWED = {"bucketId", "bucketName", "namePrefix"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BackblazeConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    bucket = entry.runtime_data

    try:
        bucket_info = {
            "name": bucket.name,
            "id": bucket.id_,
            "type": bucket.type_,
            "cors_rules": bucket.cors_rules,
            "lifecycle_rules": bucket.lifecycle_rules,
            "revision": bucket.revision,
        }

        account_info = bucket.api.account_info
        account_data: dict[str, Any] = {
            "account_id": account_info.get_account_id(),
            "api_url": account_info.get_api_url(),
            "download_url": account_info.get_download_url(),
            "minimum_part_size": account_info.get_minimum_part_size(),
            "allowed": account_info.get_allowed(),
        }

        if isinstance(account_data["allowed"], dict):
            account_data["allowed"] = async_redact_data(
                account_data["allowed"], TO_REDACT_ACCOUNT_DATA_ALLOWED
            )

    except (AttributeError, TypeError, ValueError, KeyError):
        bucket_info = {"name": "unknown", "id": "unknown"}
        account_data = {"error": "Failed to retrieve detailed account information"}

    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_ENTRY_DATA),
        "entry_options": entry.options,
        "bucket_info": bucket_info,
        "account_info": account_data,
    }
homeassistant/components/backblaze_b2/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "backblaze_b2",
  "name": "Backblaze B2",
  "codeowners": ["@hugo-vrijswijk", "@ElCruncharino"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/backblaze_b2",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["b2sdk"],
  "quality_scale": "bronze",
  "requirements": ["b2sdk==2.8.1"]
}
homeassistant/components/backblaze_b2/quality_scale.yaml (new file, 124 lines)
@@ -0,0 +1,124 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: Integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: Entities of this integration do not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: |
      This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: |
      This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: Integration does not register custom actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: This integration does not have entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: This integration does not have entities.
  parallel-updates:
    status: exempt
    comment: This integration does not poll.
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: This integration does not have entities.
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  discovery:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  docs-data-update:
    status: exempt
    comment: This integration does not poll.
  docs-examples:
    status: exempt
    comment: The integration extends core functionality and does not require examples.
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: This integration does not support physical devices.
  docs-supported-functions:
    status: exempt
    comment: This integration does not have entities.
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: This integration does not have devices.
  entity-category:
    status: exempt
    comment: This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: This integration does not have entities.
  entity-translations:
    status: exempt
    comment: This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: This integration does not use icons.
  reconfiguration-flow: done
  repair-issues: done
  stale-devices:
    status: exempt
    comment: This integration does not have devices.

  # Platinum
  async-dependency:
    status: exempt
    comment: |
      The b2sdk library is synchronous by design. All sync operations are properly
      wrapped with async_add_executor_job to prevent blocking the event loop.
  inject-websession:
    status: exempt
    comment: |
      The b2sdk library does not support custom HTTP session injection.
      It manages HTTP connections internally through its own session management.
  strict-typing:
    status: exempt
    comment: |
      The b2sdk dependency does not include a py.typed file and is not PEP 561 compliant.
      This is outside the integration's control as it's a third-party library requirement.
homeassistant/components/backblaze_b2/repairs.py (new file, 93 lines)
@@ -0,0 +1,93 @@
"""Repair issues for the Backblaze B2 integration."""

from __future__ import annotations

import logging

from b2sdk.v2.exception import (
    B2Error,
    NonExistentBucket,
    RestrictedBucket,
    Unauthorized,
)

from homeassistant.components.repairs import ConfirmRepairFlow
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

from .const import CONF_BUCKET, DOMAIN

_LOGGER = logging.getLogger(__name__)

ISSUE_BUCKET_ACCESS_RESTRICTED = "bucket_access_restricted"
ISSUE_BUCKET_NOT_FOUND = "bucket_not_found"


def _create_issue(
    hass: HomeAssistant,
    entry: ConfigEntry,
    issue_type: str,
    bucket_name: str,
) -> None:
    """Create a repair issue with standard parameters."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        f"{issue_type}_{entry.entry_id}",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=ir.IssueSeverity.ERROR,
        translation_key=issue_type,
        translation_placeholders={
            "brand_name": "Backblaze B2",
            "title": entry.title,
            "bucket_name": bucket_name,
            "entry_id": entry.entry_id,
        },
    )


def create_bucket_access_restricted_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for restricted bucket access."""
    _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, bucket_name)


def create_bucket_not_found_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for non-existent bucket."""
    _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, bucket_name)


async def async_check_for_repair_issues(
    hass: HomeAssistant, entry: ConfigEntry
) -> None:
    """Check for common issues that require user action."""
    bucket = entry.runtime_data
    restricted_issue_id = f"{ISSUE_BUCKET_ACCESS_RESTRICTED}_{entry.entry_id}"
    not_found_issue_id = f"{ISSUE_BUCKET_NOT_FOUND}_{entry.entry_id}"

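    # get_allowed() serves as a lightweight authenticated probe: success
    # clears any stale issues, while the specific B2 exceptions below map
    # to a reauth flow or a repair issue.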
    try:
        await hass.async_add_executor_job(bucket.api.account_info.get_allowed)
        ir.async_delete_issue(hass, DOMAIN, restricted_issue_id)
        ir.async_delete_issue(hass, DOMAIN, not_found_issue_id)
    except Unauthorized:
        entry.async_start_reauth(hass)
    except RestrictedBucket as err:
        _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, err.bucket_name)
    except NonExistentBucket:
        _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, entry.data[CONF_BUCKET])
    except B2Error as err:
        _LOGGER.debug("B2 connectivity test failed: %s", err)


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> ConfirmRepairFlow:
    """Create a fix flow for Backblaze B2 issues."""
    return ConfirmRepairFlow()
homeassistant/components/backblaze_b2/strings.json (new file, 92 lines)
@@ -0,0 +1,92 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
      "invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
      "invalid_credentials": "[%key:component::backblaze_b2::exceptions::invalid_credentials::message%]",
      "invalid_prefix": "[%key:component::backblaze_b2::exceptions::invalid_prefix::message%]",
      "restricted_bucket": "[%key:component::backblaze_b2::exceptions::restricted_bucket::message%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "application_key": "Application key",
          "key_id": "Key ID"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "key_id": "Key ID to connect to {brand_name}"
        },
        "description": "Update your {brand_name} credentials for bucket {bucket}.",
        "title": "Reauthenticate {brand_name}"
      },
      "reconfigure": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Reconfigure {brand_name}"
      },
      "user": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Add {brand_name} backup"
      }
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to endpoint"
    },
    "invalid_bucket_name": {
      "message": "Bucket does not exist or is not writable by the provided credentials."
    },
    "invalid_capability": {
      "message": "Application key does not have the required read/write capabilities."
    },
"invalid_credentials": {
|
||||||
|
"message": "Bucket cannot be accessed using provided of key ID and application key."
|
||||||
|
},
|
||||||
|
"invalid_prefix": {
|
||||||
|
"message": "Prefix is not allowed for provided key. Must start with {allowed_prefix}."
|
||||||
|
},
|
    "restricted_bucket": {
      "message": "Application key is restricted to bucket {restricted_bucket_name}."
    }
  },
  "issues": {
    "bucket_access_restricted": {
      "description": "Access to your {brand_name} bucket {bucket_name} is restricted for the current credentials. This means your application key may only have access to specific buckets, but not this one. To fix this issue:\n\n1. Log in to your {brand_name} account\n2. Check your application key restrictions\n3. Either use a different bucket that your key can access, or create a new application key with access to {bucket_name}\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket access restricted"
    },
    "bucket_not_found": {
      "description": "The {brand_name} bucket {bucket_name} cannot be found or accessed. This could mean:\n\n1. The bucket was deleted\n2. The bucket name was changed\n3. Your credentials no longer have access to this bucket\n\nTo fix this issue:\n\n1. Log in to your {brand_name} account\n2. Verify the bucket still exists and check its name\n3. Ensure your application key has access to this bucket\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket not found"
    }
  }
}
@@ -8,6 +8,6 @@
   "integration_type": "service",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
+  "requirements": ["cronsim==2.7", "securetar==2025.2.1"],
   "single_config_entry": true
 }
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["mozart-api==4.1.1.116.4"],
+  "requirements": ["mozart-api==5.1.0.247.1"],
   "zeroconf": ["_bangolufsen._tcp.local."]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/blue_current",
   "iot_class": "cloud_push",
   "loggers": ["bluecurrent_api"],
-  "requirements": ["bluecurrent-api==1.3.1"]
+  "requirements": ["bluecurrent-api==1.3.2"]
 }
@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address], data={}
             )

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -20,7 +20,7 @@
     "bluetooth-adapters==2.1.0",
     "bluetooth-auto-recovery==1.5.3",
     "bluetooth-data-tools==1.28.4",
-    "dbus-fast==2.44.5",
+    "dbus-fast==3.0.0",
     "habluetooth==5.7.0"
   ]
 }
@@ -99,6 +99,12 @@ def deserialize_entity_description(
         descriptions_class = descriptions_class._dataclass  # noqa: SLF001
     for field in cached_fields(descriptions_class):
         field_name = field.name
+        # Only set fields that are in the data
+        # otherwise we would override default values with None
+        # causing side effects
+        if field_name not in data:
+            continue
+
         # It would be nice if field.type returned the actual
         # type instead of a str so we could avoid writing this
         # out, but it doesn't. If we end up using this in more
@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
 from homeassistant.components.snmp import async_get_snmp_engine
 from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady

 from .const import (
     CONF_COMMUNITY,
@@ -50,6 +50,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
     coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
     await coordinator.async_config_entry_first_refresh()

+    if brother.serial.lower() != entry.unique_id:
+        raise ConfigEntryError(
+            translation_domain=DOMAIN,
+            translation_key="serial_mismatch",
+            translation_placeholders={
+                "device": entry.title,
+            },
+        )
+
     entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -13,6 +13,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import section
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 from homeassistant.util.network import is_host_valid

@@ -21,6 +22,7 @@ from .const import (
     DEFAULT_COMMUNITY,
     DEFAULT_PORT,
     DOMAIN,
+    PRINTER_TYPE_LASER,
     PRINTER_TYPES,
     SECTION_ADVANCED_SETTINGS,
 )
@@ -28,7 +30,12 @@ from .const import (
 DATA_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_HOST): str,
-        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
+        vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
+            SelectSelectorConfig(
+                options=PRINTER_TYPES,
+                translation_key="printer_type",
+            )
+        ),
         vol.Required(SECTION_ADVANCED_SETTINGS): section(
             vol.Schema(
                 {
@@ -42,7 +49,12 @@ DATA_SCHEMA = vol.Schema(
 )
 ZEROCONF_SCHEMA = vol.Schema(
     {
-        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
+        vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
+            SelectSelectorConfig(
+                options=PRINTER_TYPES,
+                translation_key="printer_type",
+            )
+        ),
         vol.Required(SECTION_ADVANCED_SETTINGS): section(
             vol.Schema(
                 {
@@ -7,7 +7,10 @@ from typing import Final

 DOMAIN: Final = "brother"

-PRINTER_TYPES: Final = ["laser", "ink"]
+PRINTER_TYPE_LASER = "laser"
+PRINTER_TYPE_INK = "ink"
+
+PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]

 UPDATE_INTERVAL = timedelta(seconds=30)

homeassistant/components/brother/entity.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""Define the Brother entity."""

from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import BrotherDataUpdateCoordinator


class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
    """Define a Brother Printer entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: BrotherDataUpdateCoordinator,
    ) -> None:
        """Initialize."""
        super().__init__(coordinator)
        self._attr_device_info = DeviceInfo(
            configuration_url=f"http://{coordinator.brother.host}/",
            identifiers={(DOMAIN, coordinator.brother.serial)},
            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
            serial_number=coordinator.brother.serial,
            manufacturer="Brother",
            model_id=coordinator.brother.model,
            name=coordinator.brother.model,
            sw_version=coordinator.brother.firmware,
        )
@@ -8,7 +8,8 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
-  "requirements": ["brother==5.1.1"],
+  "quality_scale": "platinum",
+  "requirements": ["brother==6.0.0"],
   "zeroconf": [
     {
       "name": "brother*",
homeassistant/components/brother/quality_scale.yaml (new file, 78 lines)
@@ -0,0 +1,78 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: The integration does not register services.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: The integration does not register services.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: The integration does not register services.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: No options to configure.
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: SNMP doesn't return an error identifying an authentication problem; to change the SNMP community (a simple password) the user should use the reconfigure flow.
  test-coverage: done

  # Gold
  devices: done
  diagnostics: done
  discovery-update-info: done
  discovery: done
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: This integration has a fixed single device.
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: done
  repair-issues:
    status: exempt
    comment: This integration doesn't have any cases where raising an issue is needed.
  stale-devices:
    status: exempt
    comment: This integration has a fixed single device.

  # Platinum
  async-dependency: done
  inject-websession:
    status: exempt
    comment: The integration does not connect via HTTP; instead it uses a shared SNMP engine.
  strict-typing: done
@@ -17,15 +17,17 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.const import PERCENTAGE, EntityCategory
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import DOMAIN
 from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
+from .entity import BrotherPrinterEntity
+
+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0

 ATTR_COUNTER = "counter"
 ATTR_REMAINING_PAGES = "remaining_pages"
@@ -330,12 +332,9 @@ async def async_setup_entry(
     )


-class BrotherPrinterSensor(
-    CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
-):
-    """Define an Brother Printer sensor."""
-
-    _attr_has_entity_name = True
+class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
+    """Define a Brother Printer sensor."""
+
     entity_description: BrotherSensorEntityDescription

     def __init__(
@@ -345,22 +344,11 @@ class BrotherPrinterSensor(
     ) -> None:
         """Initialize."""
         super().__init__(coordinator)
-        self._attr_device_info = DeviceInfo(
-            configuration_url=f"http://{coordinator.brother.host}/",
-            identifiers={(DOMAIN, coordinator.brother.serial)},
-            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
-            serial_number=coordinator.brother.serial,
-            manufacturer="Brother",
-            model=coordinator.brother.model,
-            name=coordinator.brother.model,
-            sw_version=coordinator.brother.firmware,
-        )
-        self._attr_native_value = description.value(coordinator.data)
         self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
         self.entity_description = description

-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        self._attr_native_value = self.entity_description.value(self.coordinator.data)
-        self.async_write_ha_state()
+    @property
+    def native_value(self) -> StateType | datetime:
+        """Return the native value of the sensor."""
+        return self.entity_description.value(self.coordinator.data)
@@ -38,11 +38,11 @@
     "user": {
       "data": {
         "host": "[%key:common::config_flow::data::host%]",
-        "type": "Type of the printer"
+        "type": "Printer type"
       },
       "data_description": {
         "host": "The hostname or IP address of the Brother printer to control.",
-        "type": "Brother printer type: ink or laser."
+        "type": "The type of the Brother printer."
       },
       "sections": {
         "advanced_settings": {
@@ -207,8 +207,19 @@
     "cannot_connect": {
       "message": "An error occurred while connecting to the {device} printer: {error}"
     },
+    "serial_mismatch": {
+      "message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
+    },
     "update_error": {
       "message": "An error occurred while retrieving data from the {device} printer: {error}"
     }
+  },
+  "selector": {
+    "printer_type": {
+      "options": {
+        "ink": "ink",
+        "laser": "laser"
+      }
+    }
   }
 }
@@ -74,8 +74,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
         super().__init__(data.fast_coordinator, data)
         self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"

-        self._attr_min_temp = data.static.min_temp.value
-        self._attr_max_temp = data.static.max_temp.value
+        # Set temperature range if available, otherwise use Home Assistant defaults
+        if data.static.min_temp is not None and data.static.min_temp.value is not None:
+            self._attr_min_temp = data.static.min_temp.value
+        if data.static.max_temp is not None and data.static.max_temp.value is not None:
+            self._attr_max_temp = data.static.max_temp.value
         self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit

     @property
Some files were not shown because too many files have changed in this diff.