Mirror of https://github.com/esphome/esphome.git (synced 2025-10-25 03:28:42 +00:00)

Compare commits: 2025.9.1...clang_tidy (565 commits)
@@ -186,6 +186,11 @@ This document provides essential context for AI models interacting with this pro

  └── components/[component]/  # Component-specific tests
  ```

  Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.

* **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:

  ```bash
  ./script/test_component_grouping.py -e config --all
  ```

  This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.

* **Debugging and Troubleshooting:**
  * **Debug Tools:**
    - `esphome config <file>.yaml` to validate configuration.
@@ -216,6 +221,146 @@ This document provides essential context for AI models interacting with this pro

* **Component Development:** Keep dependencies minimal, provide clear error messages, and write comprehensive docstrings and tests.
* **Code Generation:** Generate minimal and efficient C++ code. Validate all user inputs thoroughly. Support multiple platform variations.
* **Configuration Design:** Aim for simplicity with sensible defaults, while allowing for advanced customization.
* **Embedded Systems Optimization:** ESPHome targets resource-constrained microcontrollers. Be mindful of flash size and RAM usage.

**STL Container Guidelines:**

ESPHome runs on embedded systems with limited resources. Choose containers carefully:

1. **Compile-time-known sizes:** Use `std::array` instead of `std::vector` when size is known at compile time.

   ```cpp
   // Bad - generates STL realloc code
   std::vector<int> values;

   // Good - no dynamic allocation
   std::array<int, MAX_VALUES> values;
   ```

   Use `cg.add_define("MAX_VALUES", count)` to set the size from Python configuration.

   **For byte buffers:** Avoid `std::vector<uint8_t>` unless the buffer needs to grow. Use `std::unique_ptr<uint8_t[]>` instead.

   > **Note:** `std::unique_ptr<uint8_t[]>` does **not** provide bounds checking or iterator support like `std::vector<uint8_t>`. Use it only when you do not need these features and want minimal overhead.

   ```cpp
   // Bad - STL overhead for simple byte buffer
   std::vector<uint8_t> buffer;
   buffer.resize(256);

   // Good - minimal overhead, single allocation
   std::unique_ptr<uint8_t[]> buffer = std::make_unique<uint8_t[]>(256);
   // Or if size is constant:
   std::array<uint8_t, 256> buffer;
   ```
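   Because the raw array carries no size information, a common pattern is to pass the length alongside the pointer. A minimal sketch, assuming nothing beyond the standard library (the `fill_pattern` helper and `example()` wrapper are illustrative, not ESPHome APIs):

   ```cpp
   #include <cstddef>
   #include <cstdint>
   #include <memory>

   // Fill a raw byte buffer; the caller must pass the length explicitly,
   // since std::unique_ptr<uint8_t[]> has no size() or bounds checking.
   void fill_pattern(uint8_t *data, size_t len) {
     for (size_t i = 0; i < len; i++)
       data[i] = static_cast<uint8_t>(i & 0xFF);
   }

   void example() {
     constexpr size_t BUFFER_SIZE = 256;
     auto buffer = std::make_unique<uint8_t[]>(BUFFER_SIZE);
     fill_pattern(buffer.get(), BUFFER_SIZE);  // the length travels with the pointer
   }
   ```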
2. **Compile-time-known fixed sizes with vector-like API:** Use `StaticVector` from `esphome/core/helpers.h` for fixed-size stack allocation with `push_back()` interface.

   ```cpp
   // Bad - generates STL realloc code (_M_realloc_insert)
   std::vector<ServiceRecord> services;
   services.reserve(5);  // Still includes reallocation machinery

   // Good - compile-time fixed size, stack allocated, no reallocation machinery
   StaticVector<ServiceRecord, MAX_SERVICES> services;  // Allocates all MAX_SERVICES on stack
   services.push_back(record1);  // Tracks count but all slots allocated
   ```

   Use `cg.add_define("MAX_SERVICES", count)` to set the size from Python configuration.
   Like `std::array` but with vector-like API (`push_back()`, `size()`) and no STL reallocation code.
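   Since the capacity is fixed at compile time, it can help to guard `push_back()` with a `size()` check. A minimal sketch using only the `push_back()`/`size()` API mentioned above (the `discovered_records` source is illustrative):

   ```cpp
   StaticVector<ServiceRecord, MAX_SERVICES> services;
   for (const ServiceRecord &record : discovered_records) {
     if (services.size() >= MAX_SERVICES)
       break;  // capacity reached; silently dropping extra records is a deliberate choice here
     services.push_back(record);
   }
   ```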
3. **Runtime-known sizes:** Use `FixedVector` from `esphome/core/helpers.h` when the size is only known at runtime initialization.

   ```cpp
   // Bad - generates STL realloc code (_M_realloc_insert)
   std::vector<TxtRecord> txt_records;
   txt_records.reserve(5);  // Still includes reallocation machinery

   // Good - runtime size, single allocation, no reallocation machinery
   FixedVector<TxtRecord> txt_records;
   txt_records.init(record_count);  // Initialize with exact size at runtime
   ```

   **Benefits:**
   - Eliminates `_M_realloc_insert`, `_M_default_append` template instantiations (saves 200-500 bytes per instance)
   - Single allocation, no upper bound needed
   - No reallocation overhead
   - Compatible with protobuf code generation when using `[(fixed_vector) = true]` option

4. **Small datasets (1-16 elements):** Use `std::vector` or `std::array` with simple structs instead of `std::map`/`std::set`/`std::unordered_map`.

   ```cpp
   // Bad - 2KB+ overhead for red-black tree/hash table
   std::map<std::string, int> small_lookup;
   std::unordered_map<int, std::string> tiny_map;

   // Good - simple struct with linear search (std::vector is fine)
   struct LookupEntry {
     const char *key;
     int value;
   };
   std::vector<LookupEntry> small_lookup = {
       {"key1", 10},
       {"key2", 20},
       {"key3", 30},
   };
   // Or std::array if size is compile-time constant:
   // std::array<LookupEntry, 3> small_lookup = {{ ... }};
   ```

   Linear search on small datasets (1-16 elements) is often faster than hashing/tree overhead, but this depends on lookup frequency and access patterns. For frequent lookups in hot code paths, the O(1) vs O(n) complexity difference may still matter even for small datasets. `std::vector` with simple structs is usually fine—it's the heavy containers (`map`, `set`, `unordered_map`) that should be avoided for small datasets unless profiling shows otherwise.
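   The linear search itself stays tiny. A minimal sketch of a lookup helper over the table above (the `lookup_value` name is illustrative, not an existing ESPHome helper):

   ```cpp
   #include <cstring>
   #include <vector>

   // Return the value for `key`, or `fallback` if the key is not present.
   int lookup_value(const std::vector<LookupEntry> &table, const char *key, int fallback) {
     for (const LookupEntry &entry : table) {
       if (std::strcmp(entry.key, key) == 0)
         return entry.value;
     }
     return fallback;
   }
   ```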
5. **Detection:** Look for these patterns in compiler output:
   - Large code sections with STL symbols (vector, map, set)
   - `alloc`, `realloc`, `dealloc` in symbol names
   - `_M_realloc_insert`, `_M_default_append` (vector reallocation)
   - Red-black tree code (`rb_tree`, `_Rb_tree`)
   - Hash table infrastructure (`unordered_map`, `hash`)

**When to optimize:**
- Core components (API, network, logger)
- Widely-used components (mdns, wifi, ble)
- Components causing flash size complaints

**When not to optimize:**
- Single-use niche components
- Code where readability matters more than bytes
- Already using appropriate containers

* **State Management:** Use `CORE.data` for component state that needs to persist during configuration generation. Avoid module-level mutable globals.

  **Bad Pattern (Module-Level Globals):**
  ```python
  # Don't do this - state persists between compilation runs
  _component_state = []
  _use_feature = None

  def enable_feature():
      global _use_feature
      _use_feature = True
  ```

  **Good Pattern (CORE.data with Helpers):**
  ```python
  from esphome.core import CORE

  # Keys for CORE.data storage
  COMPONENT_STATE_KEY = "my_component_state"
  USE_FEATURE_KEY = "my_component_use_feature"

  def _get_component_state() -> list:
      """Get component state from CORE.data."""
      return CORE.data.setdefault(COMPONENT_STATE_KEY, [])

  def _get_use_feature() -> bool | None:
      """Get feature flag from CORE.data."""
      return CORE.data.get(USE_FEATURE_KEY)

  def _set_use_feature(value: bool) -> None:
      """Set feature flag in CORE.data."""
      CORE.data[USE_FEATURE_KEY] = value

  def enable_feature():
      _set_use_feature(True)
  ```

  **Why this matters:**
  - Module-level globals persist between compilation runs if the dashboard doesn't fork/exec
  - `CORE.data` automatically clears between runs
  - Typed helper functions provide better IDE support and maintainability
  - Encapsulation makes state management explicit and testable

* **Security:** Be mindful of security when making changes to the API, web server, or any other network-related code. Do not hardcode secrets or keys.

@@ -1 +1 @@
4368db58e8f884aff245996b1e8b644cc0796c0bb2fa706d5740d40b823d3ac9
d7693a1e996cacd4a3d1c9a16336799c2a8cc3db02e4e74084151ce964581248

@@ -1,4 +1,5 @@
[run]
omit =
    esphome/components/*
    esphome/analyze_memory/*
    tests/integration/*
.github/actions/build-image/action.yaml (vendored, 4 lines changed)
@@ -47,7 +47,7 @@ runs:
|
||||
|
||||
- name: Build and push to ghcr by digest
|
||||
id: build-ghcr
|
||||
uses: docker/build-push-action@v6.18.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
env:
|
||||
DOCKER_BUILD_SUMMARY: false
|
||||
DOCKER_BUILD_RECORD_UPLOAD: false
|
||||
@@ -73,7 +73,7 @@ runs:
|
||||
|
||||
- name: Build and push to dockerhub by digest
|
||||
id: build-dockerhub
|
||||
uses: docker/build-push-action@v6.18.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
env:
|
||||
DOCKER_BUILD_SUMMARY: false
|
||||
DOCKER_BUILD_RECORD_UPLOAD: false
|
||||
|
||||
.github/actions/restore-python/action.yml (vendored, 4 lines changed)
@@ -17,12 +17,12 @@ runs:
|
||||
steps:
|
||||
- name: Set up Python ${{ inputs.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
# yamllint disable-line rule:line-length
|
||||
|
||||
.github/workflows/auto-label-pr.yml (vendored, 6 lines changed)
@@ -22,17 +22,17 @@ jobs:
|
||||
if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2
|
||||
with:
|
||||
app-id: ${{ secrets.ESPHOME_GITHUB_APP_ID }}
|
||||
private-key: ${{ secrets.ESPHOME_GITHUB_APP_PRIVATE_KEY }}
|
||||
|
||||
- name: Auto Label PR
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
|
||||
.github/workflows/ci-api-proto.yml (vendored, 10 lines changed)
@@ -21,9 +21,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
@@ -47,7 +47,7 @@ jobs:
|
||||
fi
|
||||
- if: failure()
|
||||
name: Review PR
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
await github.rest.pulls.createReview({
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
run: git diff
|
||||
- if: failure()
|
||||
name: Archive artifacts
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: generated-proto-files
|
||||
path: |
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
esphome/components/api/api_pb2_service.*
|
||||
- if: success()
|
||||
name: Dismiss review
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
let reviews = await github.rest.pulls.listReviews({
|
||||
|
||||
.github/workflows/ci-clang-tidy-hash.yml (vendored, 9 lines changed)
@@ -6,6 +6,7 @@ on:
|
||||
- ".clang-tidy"
|
||||
- "platformio.ini"
|
||||
- "requirements_dev.txt"
|
||||
- "sdkconfig.defaults"
|
||||
- ".clang-tidy.hash"
|
||||
- "script/clang_tidy_hash.py"
|
||||
- ".github/workflows/ci-clang-tidy-hash.yml"
|
||||
@@ -20,10 +21,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
@@ -41,7 +42,7 @@ jobs:
|
||||
|
||||
- if: failure()
|
||||
name: Request changes
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
await github.rest.pulls.createReview({
|
||||
@@ -54,7 +55,7 @@ jobs:
|
||||
|
||||
- if: success()
|
||||
name: Dismiss review
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
let reviews = await github.rest.pulls.listReviews({
|
||||
|
||||
.github/workflows/ci-docker.yml (vendored, 6 lines changed)
@@ -43,13 +43,13 @@ jobs:
|
||||
- "docker"
|
||||
# - "lint"
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.11.1
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Set TAG
|
||||
run: |
|
||||
|
||||
.github/workflows/ci-memory-impact-comment.yml (vendored, new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
---
|
||||
name: Memory Impact Comment (Forks)
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["CI"]
|
||||
types: [completed]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
memory-impact-comment:
|
||||
name: Post memory impact comment (fork PRs only)
|
||||
runs-on: ubuntu-24.04
|
||||
# Only run for PRs from forks that had successful CI runs
|
||||
if: >
|
||||
github.event.workflow_run.event == 'pull_request' &&
|
||||
github.event.workflow_run.conclusion == 'success' &&
|
||||
github.event.workflow_run.head_repository.full_name != github.repository
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- name: Get PR details
|
||||
id: pr
|
||||
run: |
|
||||
# Get PR details by searching for PR with matching head SHA
|
||||
# The workflow_run.pull_requests field is often empty for forks
|
||||
head_sha="${{ github.event.workflow_run.head_sha }}"
|
||||
pr_data=$(gh api "/repos/${{ github.repository }}/commits/$head_sha/pulls" \
|
||||
--jq '.[0] | {number: .number, base_ref: .base.ref}')
|
||||
if [ -z "$pr_data" ] || [ "$pr_data" == "null" ]; then
|
||||
echo "No PR found for SHA $head_sha, skipping"
|
||||
echo "skip=true" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
pr_number=$(echo "$pr_data" | jq -r '.number')
|
||||
base_ref=$(echo "$pr_data" | jq -r '.base_ref')
|
||||
|
||||
echo "pr_number=$pr_number" >> $GITHUB_OUTPUT
|
||||
echo "base_ref=$base_ref" >> $GITHUB_OUTPUT
|
||||
echo "Found PR #$pr_number targeting base branch: $base_ref"
|
||||
|
||||
- name: Check out code from base repository
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Always check out from the base repository (esphome/esphome), never from forks
|
||||
# Use the PR's target branch to ensure we run trusted code from the main repo
|
||||
repository: ${{ github.repository }}
|
||||
ref: ${{ steps.pr.outputs.base_ref }}
|
||||
|
||||
- name: Restore Python
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: "3.11"
|
||||
cache-key: ${{ hashFiles('.cache-key') }}
|
||||
|
||||
- name: Download memory analysis artifacts
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
run: |
|
||||
run_id="${{ github.event.workflow_run.id }}"
|
||||
echo "Downloading artifacts from workflow run $run_id"
|
||||
|
||||
mkdir -p memory-analysis
|
||||
|
||||
# Download target analysis artifact
|
||||
if gh run download --name "memory-analysis-target" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
|
||||
echo "Downloaded memory-analysis-target artifact."
|
||||
else
|
||||
echo "No memory-analysis-target artifact found."
|
||||
fi
|
||||
|
||||
# Download PR analysis artifact
|
||||
if gh run download --name "memory-analysis-pr" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
|
||||
echo "Downloaded memory-analysis-pr artifact."
|
||||
else
|
||||
echo "No memory-analysis-pr artifact found."
|
||||
fi
|
||||
|
||||
- name: Check if artifacts exist
|
||||
id: check
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
run: |
|
||||
if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then
|
||||
echo "found=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "found=false" >> $GITHUB_OUTPUT
|
||||
echo "Memory analysis artifacts not found, skipping comment"
|
||||
fi
|
||||
|
||||
- name: Post or update PR comment
|
||||
if: steps.pr.outputs.skip != 'true' && steps.check.outputs.found == 'true'
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.pr.outputs.pr_number }}
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
# Pass PR number and JSON file paths directly to Python script
|
||||
# Let Python parse the JSON to avoid shell injection risks
|
||||
# The script will validate and sanitize all inputs
|
||||
python script/ci_memory_impact_comment.py \
|
||||
--pr-number "$PR_NUMBER" \
|
||||
--target-json ./memory-analysis/memory-analysis-target.json \
|
||||
--pr-json ./memory-analysis/memory-analysis-pr.json
|
||||
.github/workflows/ci.yml (vendored, 659 lines changed)
@@ -36,18 +36,18 @@ jobs:
|
||||
cache-key: ${{ steps.cache-key.outputs.key }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Generate cache-key
|
||||
id: cache-key
|
||||
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
# yamllint disable-line rule:line-length
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
if: needs.determine-jobs.outputs.python-linters == 'true'
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
@@ -91,7 +91,7 @@ jobs:
|
||||
- common
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
@@ -105,6 +105,7 @@ jobs:
|
||||
script/ci-custom.py
|
||||
script/build_codeowners.py --check
|
||||
script/build_language_schema.py --check
|
||||
script/generate-esp32-boards.py --check
|
||||
|
||||
pytest:
|
||||
name: Run pytest
|
||||
@@ -113,8 +114,7 @@ jobs:
|
||||
matrix:
|
||||
python-version:
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
- "3.13"
|
||||
- "3.14"
|
||||
os:
|
||||
- ubuntu-latest
|
||||
- macOS-latest
|
||||
@@ -123,20 +123,16 @@ jobs:
|
||||
# Minimize CI resource usage
|
||||
# by only running the Python version
|
||||
# version used for docker images on Windows and macOS
|
||||
- python-version: "3.13"
|
||||
- python-version: "3.14"
|
||||
os: windows-latest
|
||||
- python-version: "3.12"
|
||||
os: windows-latest
|
||||
- python-version: "3.13"
|
||||
os: macOS-latest
|
||||
- python-version: "3.12"
|
||||
- python-version: "3.14"
|
||||
os: macOS-latest
|
||||
runs-on: ${{ matrix.os }}
|
||||
needs:
|
||||
- common
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Restore Python
|
||||
id: restore-python
|
||||
uses: ./.github/actions/restore-python
|
||||
@@ -156,12 +152,12 @@ jobs:
|
||||
. venv/bin/activate
|
||||
pytest -vv --cov-report=xml --tb=native -n auto tests --ignore=tests/integration/
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v5.5.1
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Save Python virtual environment cache
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache/save@v4.2.4
|
||||
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
|
||||
@@ -174,12 +170,17 @@ jobs:
|
||||
outputs:
|
||||
integration-tests: ${{ steps.determine.outputs.integration-tests }}
|
||||
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
|
||||
clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }}
|
||||
python-linters: ${{ steps.determine.outputs.python-linters }}
|
||||
changed-components: ${{ steps.determine.outputs.changed-components }}
|
||||
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
|
||||
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
|
||||
component-test-count: ${{ steps.determine.outputs.component-test-count }}
|
||||
changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }}
|
||||
memory_impact: ${{ steps.determine.outputs.memory-impact }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Fetch enough history to find the merge base
|
||||
fetch-depth: 2
|
||||
@@ -201,9 +202,14 @@ jobs:
|
||||
# Extract individual fields
|
||||
echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT
|
||||
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
|
||||
echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT
|
||||
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
|
||||
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
|
||||
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
|
||||
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
|
||||
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
|
||||
echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT
|
||||
echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT
|
||||
|
||||
integration-tests:
|
||||
name: Run integration tests
|
||||
@@ -214,15 +220,15 @@ jobs:
|
||||
if: needs.determine-jobs.outputs.integration-tests == 'true'
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Python 3.13
|
||||
id: python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
|
||||
@@ -241,7 +247,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
pytest -vv --no-cov --tb=native -n auto tests/integration/
|
||||
|
||||
clang-tidy:
|
||||
clang-tidy-single:
|
||||
name: ${{ matrix.name }}
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
@@ -259,22 +265,6 @@ jobs:
|
||||
name: Run script/clang-tidy for ESP8266
|
||||
options: --environment esp8266-arduino-tidy --grep USE_ESP8266
|
||||
pio_cache_key: tidyesp8266
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 1/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 2/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 3/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 4/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 IDF
|
||||
options: --environment esp32-idf-tidy --grep USE_ESP_IDF
|
||||
@@ -287,7 +277,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Need history for HEAD~1 to work for checking changed files
|
||||
fetch-depth: 2
|
||||
@@ -300,14 +290,14 @@ jobs:
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache@v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref != 'refs/heads/dev'
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
|
||||
@@ -355,122 +345,597 @@ jobs:
|
||||
# yamllint disable-line rule:line-length
|
||||
if: always()
|
||||
|
||||
test-build-components:
|
||||
name: Component test ${{ matrix.file }}
|
||||
clang-tidy-nosplit:
|
||||
name: Run script/clang-tidy for ESP32 Arduino
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- common
|
||||
- determine-jobs
|
||||
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 && fromJSON(needs.determine-jobs.outputs.component-test-count) < 100
|
||||
strategy:
|
||||
fail-fast: false
|
||||
max-parallel: 2
|
||||
matrix:
|
||||
file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
|
||||
if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit'
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install libsdl2-dev
|
||||
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Need history for HEAD~1 to work for checking changed files
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
- name: test_build_components -e config -c ${{ matrix.file }}
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
./script/test_build_components -e config -c ${{ matrix.file }}
|
||||
- name: test_build_components -e compile -c ${{ matrix.file }}
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
./script/test_build_components -e compile -c ${{ matrix.file }}
|
||||
|
||||
test-build-components-splitter:
|
||||
name: Split components for testing into 20 groups maximum
|
||||
- name: Cache platformio
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref != 'refs/heads/dev'
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
||||
|
||||
- name: Check if full clang-tidy scan needed
|
||||
id: check_full_scan
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
if python script/clang_tidy_hash.py --check; then
|
||||
echo "full_scan=true" >> $GITHUB_OUTPUT
|
||||
echo "reason=hash_changed" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "full_scan=false" >> $GITHUB_OUTPUT
|
||||
echo "reason=normal" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Run clang-tidy
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
|
||||
echo "Running FULL clang-tidy scan (hash changed)"
|
||||
script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy
|
||||
else
|
||||
echo "Running clang-tidy on changed files only"
|
||||
script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy
|
||||
fi
|
||||
env:
|
||||
# Also cache libdeps, store them in a ~/.platformio subfolder
|
||||
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
|
||||
|
||||
- name: Suggested changes
|
||||
run: script/ci-suggest-changes
|
||||
if: always()
|
||||
|
||||
clang-tidy-split:
|
||||
name: ${{ matrix.name }}
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- common
|
||||
- determine-jobs
|
||||
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
|
||||
if: needs.determine-jobs.outputs.clang-tidy-mode == 'split'
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
max-parallel: 1
|
||||
matrix:
|
||||
include:
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 1/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 2/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 3/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 4/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Need history for HEAD~1 to work for checking changed files
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref != 'refs/heads/dev'
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
||||
|
||||
- name: Check if full clang-tidy scan needed
|
||||
id: check_full_scan
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
if python script/clang_tidy_hash.py --check; then
|
||||
echo "full_scan=true" >> $GITHUB_OUTPUT
|
||||
echo "reason=hash_changed" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "full_scan=false" >> $GITHUB_OUTPUT
|
||||
echo "reason=normal" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Run clang-tidy
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
|
||||
echo "Running FULL clang-tidy scan (hash changed)"
|
||||
script/clang-tidy --all-headers --fix ${{ matrix.options }}
|
||||
else
|
||||
echo "Running clang-tidy on changed files only"
|
||||
script/clang-tidy --all-headers --fix --changed ${{ matrix.options }}
|
||||
fi
|
||||
env:
|
||||
# Also cache libdeps, store them in a ~/.platformio subfolder
|
||||
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
|
||||
|
||||
- name: Suggested changes
|
||||
run: script/ci-suggest-changes
|
||||
if: always()
|
||||
|
||||
test-build-components-splitter:
|
||||
name: Split components for intelligent grouping (40 weighted per batch)
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- common
|
||||
- determine-jobs
|
||||
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
|
||||
outputs:
|
||||
matrix: ${{ steps.split.outputs.components }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
- name: Split components into 20 groups
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
- name: Split components intelligently based on bus configurations
|
||||
id: split
|
||||
run: |
|
||||
components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
|
||||
echo "components=$components" >> $GITHUB_OUTPUT
|
||||
. venv/bin/activate
|
||||
|
||||
# Use intelligent splitter that groups components with same bus configs
|
||||
components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
|
||||
|
||||
# Only isolate directly changed components when targeting dev branch
|
||||
# For beta/release branches, group everything for faster CI
|
||||
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
|
||||
directly_changed='[]'
|
||||
echo "Target branch: ${{ github.base_ref }} - grouping all components"
|
||||
else
|
||||
directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'
|
||||
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components"
|
||||
fi
|
||||
|
||||
echo "Splitting components intelligently..."
|
||||
output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github)
|
||||
|
||||
echo "$output" >> $GITHUB_OUTPUT
|
||||
|
||||
test-build-components-split:
|
||||
name: Test split components
|
||||
name: Test components batch (${{ matrix.components }})
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- common
|
||||
- determine-jobs
|
||||
- test-build-components-splitter
|
||||
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
|
||||
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
|
||||
strategy:
|
||||
fail-fast: false
|
||||
max-parallel: 4
|
||||
max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }}
|
||||
matrix:
|
||||
components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
|
||||
steps:
|
||||
- name: Show disk space
|
||||
run: |
|
||||
echo "Available disk space:"
|
||||
df -h
|
||||
|
||||
- name: List components
|
||||
run: echo ${{ matrix.components }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install libsdl2-dev
|
||||
- name: Cache apt packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
|
||||
with:
|
||||
packages: libsdl2-dev
|
||||
version: 1.0
|
||||
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
- name: Validate config
|
||||
- name: Validate and compile components with intelligent grouping
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
for component in ${{ matrix.components }}; do
|
||||
./script/test_build_components -e config -c $component
|
||||
done
|
||||
- name: Compile config
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
mkdir build_cache
|
||||
export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
|
||||
for component in ${{ matrix.components }}; do
|
||||
./script/test_build_components -e compile -c $component
|
||||
done
|
||||
|
||||
# Check if /mnt has more free space than / before bind mounting
|
||||
# Extract available space in KB for comparison
|
||||
root_avail=$(df -k / | awk 'NR==2 {print $4}')
|
||||
mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}')
|
||||
|
||||
echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB"
|
||||
|
||||
# Only use /mnt if it has more space than /
|
||||
if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then
|
||||
echo "Using /mnt for build files (more space available)"
|
||||
# Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
|
||||
sudo mkdir -p /mnt/platformio
|
||||
sudo chown $USER:$USER /mnt/platformio
|
||||
mkdir -p ~/.platformio
|
||||
sudo mount --bind /mnt/platformio ~/.platformio
|
||||
|
||||
# Bind mount test build directory to /mnt
|
||||
sudo mkdir -p /mnt/test_build_components_build
|
||||
sudo chown $USER:$USER /mnt/test_build_components_build
|
||||
mkdir -p tests/test_build_components/build
|
||||
sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
|
||||
else
|
||||
echo "Using / for build files (more space available than /mnt or /mnt unavailable)"
|
||||
fi
|
||||
|
||||
# Convert space-separated components to comma-separated for Python script
|
||||
components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
|
||||
|
||||
# Only isolate directly changed components when targeting dev branch
|
||||
# For beta/release branches, group everything for faster CI
|
||||
#
|
||||
# WHY ISOLATE DIRECTLY CHANGED COMPONENTS?
|
||||
# - Isolated tests run WITHOUT --testing-mode, enabling full validation
|
||||
# - This catches pin conflicts and other issues in directly changed code
|
||||
# - Grouped tests use --testing-mode to allow config merging (disables some checks)
|
||||
# - Dependencies are safe to group since they weren't modified in this PR
|
||||
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
|
||||
directly_changed_csv=""
|
||||
echo "Testing components: $components_csv"
|
||||
echo "Target branch: ${{ github.base_ref }} - grouping all components"
|
||||
else
|
||||
directly_changed_csv=$(echo '${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}' | jq -r 'join(",")')
|
||||
echo "Testing components: $components_csv"
|
||||
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components: $directly_changed_csv"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Show disk space before validation (after bind mounts setup)
|
||||
echo "Disk space before config validation:"
|
||||
df -h
|
||||
echo ""
|
||||
|
||||
# Run config validation with grouping and isolation
|
||||
python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"
|
||||
|
||||
echo ""
|
||||
echo "Config validation passed! Starting compilation..."
|
||||
echo ""
|
||||
|
||||
# Show disk space before compilation
|
||||
echo "Disk space before compilation:"
|
||||
df -h
|
||||
echo ""
|
||||
|
||||
# Run compilation with grouping and isolation
|
||||
python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"
|
||||
|
||||
pre-commit-ci-lite:
|
||||
name: pre-commit.ci lite
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- common
|
||||
if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
|
||||
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
- uses: pre-commit/action@v3.0.1
|
||||
- uses: esphome/action@43cd1109c09c544d97196f7730ee5b2e0cc6d81e # v3.0.1 fork with pinned actions/cache
|
||||
env:
|
||||
SKIP: pylint,clang-tidy-hash
|
||||
- uses: pre-commit-ci/lite-action@v1.1.0
|
||||
- uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
|
||||
if: always()
|
||||
|
||||
memory-impact-target-branch:
|
||||
name: Build target branch for memory impact
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- common
|
||||
- determine-jobs
|
||||
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
|
||||
outputs:
|
||||
ram_usage: ${{ steps.extract.outputs.ram_usage }}
|
||||
flash_usage: ${{ steps.extract.outputs.flash_usage }}
|
||||
cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }}
|
||||
skip: ${{ steps.check-script.outputs.skip }}
|
||||
steps:
|
||||
- name: Check out target branch
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
|
||||
# Check if memory impact extraction script exists on target branch
|
||||
# If not, skip the analysis (this handles older branches that don't have the feature)
|
||||
- name: Check for memory impact script
|
||||
id: check-script
|
||||
run: |
|
||||
if [ -f "script/ci_memory_impact_extract.py" ]; then
|
||||
echo "skip=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "skip=true" >> $GITHUB_OUTPUT
|
||||
echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis"
|
||||
fi
|
||||
|
||||
# All remaining steps only run if script exists
- name: Generate cache key
id: cache-key
if: steps.check-script.outputs.skip != 'true'
run: |
# Get the commit SHA of the target branch
target_sha=$(git rev-parse HEAD)

# Hash the build infrastructure files (all files that affect build/analysis)
infra_hash=$(cat \
script/test_build_components.py \
script/ci_memory_impact_extract.py \
script/analyze_component_buses.py \
script/merge_component_configs.py \
script/ci_helpers.py \
.github/workflows/ci.yml \
| sha256sum | cut -d' ' -f1)

# Get platform and components from job inputs
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1)

# Combine into cache key
cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}"
echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT
echo "Cache key: ${cache_key}"

- name: Restore cached memory analysis
id: cache-memory-analysis
if: steps.check-script.outputs.skip != 'true'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}

- name: Cache status
if: steps.check-script.outputs.skip != 'true'
run: |
if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then
echo "✓ Cache hit! Using cached memory analysis results."
echo " Skipping build step to save time."
else
echo "✗ Cache miss. Will build and analyze memory usage."
fi

- name: Restore Python
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}

- name: Build, compile, and analyze memory
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
id: build
run: |
. venv/bin/activate
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"

echo "Building with test_build_components.py for $platform with components:"
echo "$components" | jq -r '.[]' | sed 's/^/ - /'

# Use test_build_components.py which handles grouping automatically
# Pass components as comma-separated list
component_list=$(echo "$components" | jq -r 'join(",")')

echo "Compiling with test_build_components.py..."

# Run build and extract memory with auto-detection of build directory for detailed analysis
# Use tee to show output in CI while also piping to extraction script
python script/test_build_components.py \
-e compile \
-c "$component_list" \
-t "$platform" 2>&1 | \
tee /dev/stderr | \
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-target.json

# Add metadata to JSON before caching
python script/ci_add_metadata_to_json.py \
--json-file memory-analysis-target.json \
--components "$components" \
--platform "$platform"

- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}

- name: Extract memory usage for outputs
id: extract
if: steps.check-script.outputs.skip != 'true'
run: |
if [ -f memory-analysis-target.json ]; then
ram=$(jq -r '.ram_bytes' memory-analysis-target.json)
flash=$(jq -r '.flash_bytes' memory-analysis-target.json)
echo "ram_usage=${ram}" >> $GITHUB_OUTPUT
echo "flash_usage=${flash}" >> $GITHUB_OUTPUT
echo "RAM: ${ram} bytes, Flash: ${flash} bytes"
else
echo "Error: memory-analysis-target.json not found"
exit 1
fi

- name: Upload memory analysis JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: memory-analysis-target
path: memory-analysis-target.json
if-no-files-found: warn
retention-days: 1

memory-impact-pr-branch:
name: Build PR branch for memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
outputs:
ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }}
steps:
- name: Check out PR branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
- name: Build, compile, and analyze memory
id: extract
run: |
. venv/bin/activate
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"

echo "Building with test_build_components.py for $platform with components:"
echo "$components" | jq -r '.[]' | sed 's/^/ - /'

# Use test_build_components.py which handles grouping automatically
# Pass components as comma-separated list
component_list=$(echo "$components" | jq -r 'join(",")')

echo "Compiling with test_build_components.py..."

# Run build and extract memory with auto-detection of build directory for detailed analysis
# Use tee to show output in CI while also piping to extraction script
python script/test_build_components.py \
-e compile \
-c "$component_list" \
-t "$platform" 2>&1 | \
tee /dev/stderr | \
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-pr.json

# Add metadata to JSON (components and platform are in shell variables above)
python script/ci_add_metadata_to_json.py \
--json-file memory-analysis-pr.json \
--components "$components" \
--platform "$platform"

- name: Upload memory analysis JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: memory-analysis-pr
path: memory-analysis-pr.json
if-no-files-found: warn
retention-days: 1

memory-impact-comment:
name: Comment memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
- memory-impact-target-branch
- memory-impact-pr-branch
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
permissions:
contents: read
pull-requests: write
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Download target analysis JSON
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-target
path: ./memory-analysis
continue-on-error: true
- name: Download PR analysis JSON
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-pr
path: ./memory-analysis
continue-on-error: true
- name: Post or update PR comment
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
. venv/bin/activate

# Pass JSON file paths directly to Python script
# All data is extracted from JSON files for security
python script/ci_memory_impact_comment.py \
--pr-number "$PR_NUMBER" \
--target-json ./memory-analysis/memory-analysis-target.json \
--pr-json ./memory-analysis/memory-analysis-pr.json
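The comment step consumes the two artifacts downloaded above. A minimal sketch of the comparison it enables, assuming only the ram_bytes and flash_bytes fields that the extract step reads with jq; the actual ci_memory_impact_comment.py may compute and render more than this.

# Minimal sketch: compare the target- and PR-branch analysis JSONs.
import json
from pathlib import Path

def memory_delta(target_json: str, pr_json: str) -> dict[str, int]:
    target = json.loads(Path(target_json).read_text())
    pr = json.loads(Path(pr_json).read_text())
    return {
        "ram_delta_bytes": pr["ram_bytes"] - target["ram_bytes"],
        "flash_delta_bytes": pr["flash_bytes"] - target["flash_bytes"],
    }

# Example usage with the artifact paths downloaded above:
# memory_delta("./memory-analysis/memory-analysis-target.json",
#              "./memory-analysis/memory-analysis-pr.json")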

ci-status:
name: CI Status
runs-on: ubuntu-24.04
@@ -480,12 +945,16 @@ jobs:
- pylint
- pytest
- integration-tests
- clang-tidy
- clang-tidy-single
- clang-tidy-nosplit
- clang-tidy-split
- determine-jobs
- test-build-components
- test-build-components-splitter
- test-build-components-split
- pre-commit-ci-lite
- memory-impact-target-branch
- memory-impact-pr-branch
- memory-impact-comment
if: always()
steps:
- name: Success

@@ -25,7 +25,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Request reviews from component codeowners
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
const owner = context.repo.owner;
|
||||
|
||||
6
.github/workflows/codeql.yml
vendored
@@ -54,11 +54,11 @@ jobs:
|
||||
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
build-mode: ${{ matrix.build-mode }}
|
||||
@@ -86,6 +86,6 @@ jobs:
|
||||
exit 1
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
2
.github/workflows/external-component-bot.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Add external component comment
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
|
||||
2
.github/workflows/issue-codeowner-notify.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Notify codeowners for component issues
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
const owner = context.repo.owner;
|
||||
|
||||
34
.github/workflows/release.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
branch_build: ${{ steps.tag.outputs.branch_build }}
|
||||
deploy_env: ${{ steps.tag.outputs.deploy_env }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Get tag
|
||||
id: tag
|
||||
# yamllint disable rule:line-length
|
||||
@@ -60,9 +60,9 @@ jobs:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- name: Build
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
pip3 install build
|
||||
python3 -m build
|
||||
- name: Publish
|
||||
uses: pypa/gh-action-pypi-publish@v1.13.0
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
with:
|
||||
skip-existing: true
|
||||
|
||||
@@ -92,22 +92,22 @@ jobs:
|
||||
os: "ubuntu-24.04-arm"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.11.1
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Log in to docker hub
|
||||
uses: docker/login-action@v3.5.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Log in to the GitHub container registry
|
||||
uses: docker/login-action@v3.5.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -138,7 +138,7 @@ jobs:
|
||||
# version: ${{ needs.init.outputs.tag }}
|
||||
|
||||
- name: Upload digests
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: digests-${{ matrix.platform.arch }}
|
||||
path: /tmp/digests
|
||||
@@ -168,27 +168,27 @@ jobs:
|
||||
- ghcr
|
||||
- dockerhub
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v5.0.0
|
||||
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||
with:
|
||||
pattern: digests-*
|
||||
path: /tmp/digests
|
||||
merge-multiple: true
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.11.1
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Log in to docker hub
|
||||
if: matrix.registry == 'dockerhub'
|
||||
uses: docker/login-action@v3.5.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Log in to the GitHub container registry
|
||||
if: matrix.registry == 'ghcr'
|
||||
uses: docker/login-action@v3.5.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -220,7 +220,7 @@ jobs:
|
||||
- deploy-manifest
|
||||
steps:
|
||||
- name: Trigger Workflow
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
|
||||
script: |
|
||||
@@ -246,7 +246,7 @@ jobs:
|
||||
environment: ${{ needs.init.outputs.deploy_env }}
|
||||
steps:
|
||||
- name: Trigger Workflow
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.DEPLOY_ESPHOME_SCHEMA_REPO_TOKEN }}
|
||||
script: |
|
||||
|
||||
52
.github/workflows/stale.yml
vendored
@@ -15,36 +15,52 @@ concurrency:
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
if: github.repository_owner == 'esphome'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v10.0.0
|
||||
- name: Stale
|
||||
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
with:
|
||||
debug-only: ${{ github.ref != 'refs/heads/dev' }} # Dry-run when not run on dev branch
|
||||
remove-stale-when-updated: true
|
||||
operations-per-run: 400
|
||||
|
||||
# The 90 day stale policy for PRs
|
||||
# - PRs
|
||||
# - No PRs marked as "not-stale"
|
||||
# - No Issues (see below)
|
||||
days-before-pr-stale: 90
|
||||
days-before-pr-close: 7
|
||||
days-before-issue-stale: -1
|
||||
days-before-issue-close: -1
|
||||
remove-stale-when-updated: true
|
||||
stale-pr-label: "stale"
|
||||
exempt-pr-labels: "not-stale"
|
||||
stale-pr-message: >
|
||||
There hasn't been any activity on this pull request recently. This
|
||||
pull request has been automatically marked as stale because of that
|
||||
and will be closed if no further activity occurs within 7 days.
|
||||
Thank you for your contributions.
|
||||
|
||||
# Use stale to automatically close issues with a
|
||||
# reference to the issue tracker
|
||||
close-issues:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v10.0.0
|
||||
with:
|
||||
days-before-pr-stale: -1
|
||||
days-before-pr-close: -1
|
||||
days-before-issue-stale: 1
|
||||
days-before-issue-close: 1
|
||||
remove-stale-when-updated: true
|
||||
If you are the author of this PR, please leave a comment if you want
|
||||
to keep it open. Also, please rebase your PR onto the latest dev
|
||||
branch to ensure that it's up to date with the latest changes.
|
||||
|
||||
Thank you for your contribution!
|
||||
|
||||
# The 90 day stale policy for Issues
|
||||
# - Issues
|
||||
# - No Issues marked as "not-stale"
|
||||
# - No PRs (see above)
|
||||
days-before-issue-stale: 90
|
||||
days-before-issue-close: 7
|
||||
stale-issue-label: "stale"
|
||||
exempt-issue-labels: "not-stale"
|
||||
stale-issue-message: >
|
||||
https://github.com/esphome/esphome/issues/430
|
||||
There hasn't been any activity on this issue recently. Due to the
|
||||
high number of incoming GitHub notifications, we have to clean some
|
||||
of the old issues, as many of them have already been resolved with
|
||||
the latest updates.
|
||||
|
||||
Please make sure to update to the latest ESPHome version and
|
||||
check if that solves the issue. Let us know if that works for you by
|
||||
adding a comment 👍
|
||||
|
||||
This issue has now been marked as stale and will be closed if no
|
||||
further activity occurs. Thank you for your contributions.
|
||||
|
||||
2
.github/workflows/status-check-labels.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
- merge-after-release
|
||||
steps:
|
||||
- name: Check for ${{ matrix.label }} label
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
|
||||
|
||||
13
.github/workflows/sync-device-classes.yml
vendored
@@ -13,16 +13,16 @@ jobs:
|
||||
if: github.repository == 'esphome/esphome'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Checkout Home Assistant
|
||||
uses: actions/checkout@v5.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
repository: home-assistant/core
|
||||
path: lib/home-assistant
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v6.0.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: 3.13
|
||||
|
||||
@@ -30,13 +30,18 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -e lib/home-assistant
|
||||
pip install -r requirements_test.txt pre-commit
|
||||
|
||||
- name: Sync
|
||||
run: |
|
||||
python ./script/sync-device_class.py
|
||||
|
||||
- name: Run pre-commit hooks
|
||||
run: |
|
||||
python script/run-in-env.py pre-commit run --all-files
|
||||
|
||||
- name: Commit changes
|
||||
uses: peter-evans/create-pull-request@v7.0.8
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
commit-message: "Synchronise Device Classes from Home Assistant"
|
||||
committer: esphomebot <esphome@openhomefoundation.org>
|
||||
|
||||
@@ -11,7 +11,7 @@ ci:
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.12.12
|
||||
rev: v0.14.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
|
||||
@@ -62,6 +62,7 @@ esphome/components/bedjet/fan/* @jhansche
|
||||
esphome/components/bedjet/sensor/* @javawizard @jhansche
|
||||
esphome/components/beken_spi_led_strip/* @Mat931
|
||||
esphome/components/bh1750/* @OttoWinter
|
||||
esphome/components/bh1900nux/* @B48D81EFCC
|
||||
esphome/components/binary_sensor/* @esphome/core
|
||||
esphome/components/bk72xx/* @kuba2k2
|
||||
esphome/components/bl0906/* @athom-tech @jesserockz @tarontop
|
||||
@@ -69,6 +70,7 @@ esphome/components/bl0939/* @ziceva
|
||||
esphome/components/bl0940/* @dan-s-github @tobias-
|
||||
esphome/components/bl0942/* @dbuezas @dwmw2
|
||||
esphome/components/ble_client/* @buxtronix @clydebarrow
|
||||
esphome/components/ble_nus/* @tomaszduda23
|
||||
esphome/components/bluetooth_proxy/* @bdraco @jesserockz
|
||||
esphome/components/bme280_base/* @esphome/core
|
||||
esphome/components/bme280_spi/* @apbodrov
|
||||
@@ -139,6 +141,7 @@ esphome/components/ens160_base/* @latonita @vincentscode
|
||||
esphome/components/ens160_i2c/* @latonita
|
||||
esphome/components/ens160_spi/* @latonita
|
||||
esphome/components/ens210/* @itn3rd77
|
||||
esphome/components/epaper_spi/* @esphome/core
|
||||
esphome/components/es7210/* @kahrendt
|
||||
esphome/components/es7243e/* @kbx81
|
||||
esphome/components/es8156/* @kbx81
|
||||
@@ -160,7 +163,6 @@ esphome/components/esp_ldo/* @clydebarrow
|
||||
esphome/components/espnow/* @jesserockz
|
||||
esphome/components/ethernet_info/* @gtjadsonsantos
|
||||
esphome/components/event/* @nohat
|
||||
esphome/components/event_emitter/* @Rapsssito
|
||||
esphome/components/exposure_notifications/* @OttoWinter
|
||||
esphome/components/ezo/* @ssieb
|
||||
esphome/components/ezo_pmp/* @carlos-sarmiento
|
||||
@@ -257,6 +259,7 @@ esphome/components/libretiny_pwm/* @kuba2k2
|
||||
esphome/components/light/* @esphome/core
|
||||
esphome/components/lightwaverf/* @max246
|
||||
esphome/components/lilygo_t5_47/touchscreen/* @jesserockz
|
||||
esphome/components/lm75b/* @beormund
|
||||
esphome/components/ln882x/* @lamauny
|
||||
esphome/components/lock/* @esphome/core
|
||||
esphome/components/logger/* @esphome/core
|
||||
@@ -407,6 +410,7 @@ esphome/components/sensor/* @esphome/core
|
||||
esphome/components/sfa30/* @ghsensdev
|
||||
esphome/components/sgp40/* @SenexCrenshaw
|
||||
esphome/components/sgp4x/* @martgras @SenexCrenshaw
|
||||
esphome/components/sha256/* @esphome/core
|
||||
esphome/components/shelly_dimmer/* @edge90 @rnauber
|
||||
esphome/components/sht3xd/* @mrtoy-me
|
||||
esphome/components/sht4x/* @sjtrny
|
||||
@@ -428,6 +432,7 @@ esphome/components/speaker/media_player/* @kahrendt @synesthesiam
|
||||
esphome/components/spi/* @clydebarrow @esphome/core
|
||||
esphome/components/spi_device/* @clydebarrow
|
||||
esphome/components/spi_led_strip/* @clydebarrow
|
||||
esphome/components/split_buffer/* @jesserockz
|
||||
esphome/components/sprinkler/* @kbx81
|
||||
esphome/components/sps30/* @martgras
|
||||
esphome/components/ssd1322_base/* @kbx81
|
||||
@@ -533,6 +538,7 @@ esphome/components/wk2204_spi/* @DrCoolZic
|
||||
esphome/components/wk2212_i2c/* @DrCoolZic
|
||||
esphome/components/wk2212_spi/* @DrCoolZic
|
||||
esphome/components/wl_134/* @hobbypunk90
|
||||
esphome/components/wts01/* @alepee
|
||||
esphome/components/x9c/* @EtienneMD
|
||||
esphome/components/xgzp68xx/* @gcormier
|
||||
esphome/components/xiaomi_hhccjcy10/* @fariouche
|
||||
@@ -548,3 +554,4 @@ esphome/components/xxtea/* @clydebarrow
|
||||
esphome/components/zephyr/* @tomaszduda23
|
||||
esphome/components/zhlt01/* @cfeenstra1024
|
||||
esphome/components/zio_ultrasonic/* @kahrendt
|
||||
esphome/components/zwave_proxy/* @kbx81
|
||||
|
||||
2
Doxyfile
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
|
||||
# could be handy for archiving the generated documentation or if some version
|
||||
# control system is used.
|
||||
|
||||
PROJECT_NUMBER = 2025.9.1
|
||||
PROJECT_NUMBER = 2025.11.0-dev
|
||||
|
||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description
|
||||
# for a project that appears at the top of each page and should give viewer a
|
||||
|
||||
@@ -6,6 +6,7 @@ import getpass
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
@@ -13,9 +14,11 @@ from typing import Protocol
|
||||
|
||||
import argcomplete
|
||||
|
||||
# Note: Do not import modules from esphome.components here, as this would
|
||||
# cause them to be loaded before external components are processed, resulting
|
||||
# in the built-in version being used instead of the external component one.
|
||||
from esphome import const, writer, yaml_util
|
||||
import esphome.codegen as cg
|
||||
from esphome.components.mqtt import CONF_DISCOVER_IP
|
||||
from esphome.config import iter_component_configs, read_config, strip_default_ids
|
||||
from esphome.const import (
|
||||
ALLOWED_NAME_CHARS,
|
||||
@@ -114,6 +117,25 @@ class Purpose(StrEnum):
|
||||
LOGGING = "logging"
|
||||
|
||||
|
||||
class PortType(StrEnum):
|
||||
SERIAL = "SERIAL"
|
||||
NETWORK = "NETWORK"
|
||||
MQTT = "MQTT"
|
||||
MQTTIP = "MQTTIP"
|
||||
|
||||
|
||||
# Magic MQTT port types that require special handling
|
||||
_MQTT_PORT_TYPES = frozenset({PortType.MQTT, PortType.MQTTIP})
|
||||
|
||||
|
||||
def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
|
||||
"""Resolve an address using cache if available, otherwise return the address itself."""
|
||||
if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):
|
||||
_LOGGER.debug("Using cached addresses for %s: %s", purpose.value, cached)
|
||||
return cached
|
||||
return [address]
|
||||
|
||||
|
||||
def choose_upload_log_host(
|
||||
default: list[str] | str | None,
|
||||
check_default: str | None,
|
||||
@@ -142,7 +164,7 @@ def choose_upload_log_host(
|
||||
(purpose == Purpose.LOGGING and has_api())
|
||||
or (purpose == Purpose.UPLOADING and has_ota())
|
||||
):
|
||||
resolved.append(CORE.address)
|
||||
resolved.extend(_resolve_with_cache(CORE.address, purpose))
|
||||
|
||||
if purpose == Purpose.LOGGING:
|
||||
if has_api() and has_mqtt_ip_lookup():
|
||||
@@ -152,19 +174,20 @@ def choose_upload_log_host(
|
||||
resolved.append("MQTT")
|
||||
|
||||
if has_api() and has_non_ip_address():
|
||||
resolved.append(CORE.address)
|
||||
resolved.extend(_resolve_with_cache(CORE.address, purpose))
|
||||
|
||||
elif purpose == Purpose.UPLOADING:
|
||||
if has_ota() and has_mqtt_ip_lookup():
|
||||
resolved.append("MQTTIP")
|
||||
|
||||
if has_ota() and has_non_ip_address():
|
||||
resolved.append(CORE.address)
|
||||
|
||||
resolved.extend(_resolve_with_cache(CORE.address, purpose))
|
||||
else:
|
||||
resolved.append(device)
|
||||
if not resolved:
|
||||
_LOGGER.error("All specified devices: %s could not be resolved.", defaults)
|
||||
raise EsphomeError(
|
||||
f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
|
||||
)
|
||||
return resolved
|
||||
|
||||
# No devices specified, show interactive chooser
|
||||
@@ -232,6 +255,8 @@ def has_ota() -> bool:
|
||||
|
||||
def has_mqtt_ip_lookup() -> bool:
|
||||
"""Check if MQTT is available and IP lookup is supported."""
|
||||
from esphome.components.mqtt import CONF_DISCOVER_IP
|
||||
|
||||
if CONF_MQTT not in CORE.config:
|
||||
return False
|
||||
# Default Enabled
|
||||
@@ -256,8 +281,10 @@ def has_ip_address() -> bool:
|
||||
|
||||
|
||||
def has_resolvable_address() -> bool:
|
||||
"""Check if CORE.address is resolvable (via mDNS or is an IP address)."""
|
||||
return has_mdns() or has_ip_address()
|
||||
"""Check if CORE.address is resolvable (via mDNS, DNS, or is an IP address)."""
|
||||
# Any address (IP, mDNS hostname, or regular DNS hostname) is resolvable
|
||||
# The resolve_ip_address() function in helpers.py handles all types via AsyncResolver
|
||||
return CORE.address is not None
|
||||
|
||||
|
||||
def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str):
|
||||
@@ -266,16 +293,67 @@ def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str
|
||||
return mqtt.get_esphome_device_ip(config, username, password, client_id)
|
||||
|
||||
|
||||
_PORT_TO_PORT_TYPE = {
"MQTT": "MQTT",
"MQTTIP": "MQTTIP",
}
def _resolve_network_devices(
devices: list[str], config: ConfigType, args: ArgsProtocol
) -> list[str]:
"""Resolve device list, converting MQTT magic strings to actual IP addresses.

This function filters the devices list to:
- Replace MQTT/MQTTIP magic strings with actual IP addresses via MQTT lookup
- Deduplicate addresses while preserving order
- Only resolve MQTT once even if multiple MQTT strings are present
- If MQTT resolution fails, log a warning and continue with other devices

Args:
devices: List of device identifiers (IPs, hostnames, or magic strings)
config: ESPHome configuration
args: Command-line arguments containing MQTT credentials

Returns:
List of network addresses suitable for connection attempts
"""
network_devices: list[str] = []
mqtt_resolved: bool = False

for device in devices:
port_type = get_port_type(device)
if port_type in _MQTT_PORT_TYPES:
# Only resolve MQTT once, even if multiple MQTT entries
if not mqtt_resolved:
try:
mqtt_ips = mqtt_get_ip(
config, args.username, args.password, args.client_id
)
network_devices.extend(mqtt_ips)
except EsphomeError as err:
_LOGGER.warning(
"MQTT IP discovery failed (%s), will try other devices if available",
err,
)
mqtt_resolved = True
elif device not in network_devices:
# Regular network address or IP - add if not already present
network_devices.append(device)

return network_devices
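A small usage sketch of the helper above. The device list is made up for illustration, and the call itself is shown commented out because config and args only exist at CLI runtime.

# Illustration only: duplicates collapse and the MQTT magic string is
# expanded once via mqtt_get_ip(); hypothetical input values.
devices = ["MQTT", "192.168.1.42", "MQTT", "192.168.1.42"]
# network = _resolve_network_devices(devices, config, args)
# -> broker-resolved IPs followed by a single "192.168.1.42" entry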

def get_port_type(port: str) -> str:
def get_port_type(port: str) -> PortType:
"""Determine the type of port/device identifier.

Returns:
PortType.SERIAL for serial ports (/dev/ttyUSB0, COM1, etc.)
PortType.MQTT for MQTT logging
PortType.MQTTIP for MQTT IP lookup
PortType.NETWORK for IP addresses, hostnames, or mDNS names
"""
if port.startswith("/") or port.startswith("COM"):
return "SERIAL"
return _PORT_TO_PORT_TYPE.get(port, "NETWORK")
return PortType.SERIAL
if port == "MQTT":
return PortType.MQTT
if port == "MQTTIP":
return PortType.MQTTIP
return PortType.NETWORK
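For reference, how the PortType-based classifier above would treat a few typical identifiers; the sample values are assumptions, not taken from the source.

# Expected classification under the rewritten get_port_type():
assert get_port_type("/dev/ttyUSB0") == PortType.SERIAL
assert get_port_type("COM3") == PortType.SERIAL
assert get_port_type("MQTT") == PortType.MQTT
assert get_port_type("MQTTIP") == PortType.MQTTIP
assert get_port_type("livingroom.local") == PortType.NETWORK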
def run_miniterm(config: ConfigType, port: str, args) -> int:
|
||||
@@ -390,7 +468,9 @@ def write_cpp_file() -> int:
|
||||
def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
|
||||
from esphome import platformio_api
|
||||
|
||||
_LOGGER.info("Compiling app...")
|
||||
# NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py
|
||||
# If you change this format, update the regex in that script as well
|
||||
_LOGGER.info("Compiling app... Build path: %s", CORE.build_path)
|
||||
rc = platformio_api.run_compile(config, CORE.verbose)
|
||||
if rc != 0:
|
||||
return rc
|
||||
@@ -445,7 +525,7 @@ def upload_using_esptool(
|
||||
"detect",
|
||||
]
|
||||
for img in flash_images:
|
||||
cmd += [img.offset, img.path]
|
||||
cmd += [img.offset, str(img.path)]
|
||||
|
||||
if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
|
||||
import esptool
|
||||
@@ -475,7 +555,7 @@ def upload_using_platformio(config: ConfigType, port: str):
|
||||
|
||||
|
||||
def check_permissions(port: str):
|
||||
if os.name == "posix" and get_port_type(port) == "SERIAL":
|
||||
if os.name == "posix" and get_port_type(port) == PortType.SERIAL:
|
||||
# Check if we can open selected serial port
|
||||
if not os.access(port, os.F_OK):
|
||||
raise EsphomeError(
|
||||
@@ -503,7 +583,7 @@ def upload_program(
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
if get_port_type(host) == "SERIAL":
|
||||
if get_port_type(host) == PortType.SERIAL:
|
||||
check_permissions(host)
|
||||
|
||||
exit_code = 1
|
||||
@@ -530,14 +610,16 @@ def upload_program(
|
||||
from esphome import espota2
|
||||
|
||||
remote_port = int(ota_conf[CONF_PORT])
|
||||
password = ota_conf.get(CONF_PASSWORD, "")
|
||||
binary = args.file if getattr(args, "file", None) is not None else CORE.firmware_bin
|
||||
password = ota_conf.get(CONF_PASSWORD)
|
||||
if getattr(args, "file", None) is not None:
|
||||
binary = Path(args.file)
|
||||
else:
|
||||
binary = CORE.firmware_bin
|
||||
|
||||
# MQTT address resolution
|
||||
if get_port_type(host) in ("MQTT", "MQTTIP"):
|
||||
devices = mqtt_get_ip(config, args.username, args.password, args.client_id)
|
||||
# Resolve MQTT magic strings to actual IP addresses
|
||||
network_devices = _resolve_network_devices(devices, config, args)
|
||||
|
||||
return espota2.run_ota(devices, remote_port, password, binary)
|
||||
return espota2.run_ota(network_devices, remote_port, password, binary)
|
||||
|
||||
|
||||
def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:
|
||||
@@ -552,32 +634,22 @@ def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int
|
||||
raise EsphomeError("Logger is not configured!")
|
||||
|
||||
port = devices[0]
|
||||
port_type = get_port_type(port)
|
||||
|
||||
if get_port_type(port) == "SERIAL":
|
||||
if port_type == PortType.SERIAL:
|
||||
check_permissions(port)
|
||||
return run_miniterm(config, port, args)
|
||||
|
||||
port_type = get_port_type(port)
|
||||
|
||||
# Check if we should use API for logging
|
||||
if has_api():
|
||||
addresses_to_use: list[str] | None = None
|
||||
# Resolve MQTT magic strings to actual IP addresses
|
||||
if has_api() and (
|
||||
network_devices := _resolve_network_devices(devices, config, args)
|
||||
):
|
||||
from esphome.components.api.client import run_logs
|
||||
|
||||
if port_type == "NETWORK" and (has_mdns() or is_ip_address(port)):
|
||||
addresses_to_use = devices
|
||||
elif port_type in ("NETWORK", "MQTT", "MQTTIP") and has_mqtt_ip_lookup():
|
||||
# Only use MQTT IP lookup if the first condition didn't match
|
||||
# (for MQTT/MQTTIP types, or for NETWORK when mdns/ip check fails)
|
||||
addresses_to_use = mqtt_get_ip(
|
||||
config, args.username, args.password, args.client_id
|
||||
)
|
||||
return run_logs(config, network_devices)
|
||||
|
||||
if addresses_to_use is not None:
|
||||
from esphome.components.api.client import run_logs
|
||||
|
||||
return run_logs(config, addresses_to_use)
|
||||
|
||||
if port_type in ("NETWORK", "MQTT") and has_mqtt_logging():
|
||||
if port_type in (PortType.NETWORK, PortType.MQTT) and has_mqtt_logging():
|
||||
from esphome import mqtt
|
||||
|
||||
return mqtt.show_logs(
|
||||
@@ -598,7 +670,7 @@ def clean_mqtt(config: ConfigType, args: ArgsProtocol) -> int | None:
|
||||
def command_wizard(args: ArgsProtocol) -> int | None:
|
||||
from esphome import wizard
|
||||
|
||||
return wizard.wizard(args.configuration)
|
||||
return wizard.wizard(Path(args.configuration))
|
||||
|
||||
|
||||
def command_config(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
@@ -720,6 +792,16 @@ def command_clean_mqtt(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
return clean_mqtt(config, args)
|
||||
|
||||
|
||||
def command_clean_all(args: ArgsProtocol) -> int | None:
|
||||
try:
|
||||
writer.clean_all(args.configuration)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Error cleaning all files: %s", err)
|
||||
return 1
|
||||
_LOGGER.info("Done!")
|
||||
return 0
|
||||
|
||||
|
||||
def command_mqtt_fingerprint(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
from esphome import mqtt
|
||||
|
||||
@@ -761,7 +843,7 @@ def command_update_all(args: ArgsProtocol) -> int | None:
|
||||
safe_print(f"{half_line}{middle_text}{half_line}")
|
||||
|
||||
for f in files:
|
||||
safe_print(f"Updating {color(AnsiFore.CYAN, f)}")
|
||||
safe_print(f"Updating {color(AnsiFore.CYAN, str(f))}")
|
||||
safe_print("-" * twidth)
|
||||
safe_print()
|
||||
if CORE.dashboard:
|
||||
@@ -773,10 +855,10 @@ def command_update_all(args: ArgsProtocol) -> int | None:
|
||||
"esphome", "run", f, "--no-logs", "--device", "OTA"
|
||||
)
|
||||
if rc == 0:
|
||||
print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {f}")
|
||||
print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {str(f)}")
|
||||
success[f] = True
|
||||
else:
|
||||
print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {f}")
|
||||
print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {str(f)}")
|
||||
success[f] = False
|
||||
|
||||
safe_print()
|
||||
@@ -787,9 +869,9 @@ def command_update_all(args: ArgsProtocol) -> int | None:
|
||||
failed = 0
|
||||
for f in files:
|
||||
if success[f]:
|
||||
safe_print(f" - {f}: {color(AnsiFore.GREEN, 'SUCCESS')}")
|
||||
safe_print(f" - {str(f)}: {color(AnsiFore.GREEN, 'SUCCESS')}")
|
||||
else:
|
||||
safe_print(f" - {f}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
|
||||
safe_print(f" - {str(f)}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
|
||||
failed += 1
|
||||
return failed
|
||||
|
||||
@@ -811,7 +893,8 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int:
|
||||
|
||||
|
||||
def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
for c in args.name:
|
||||
new_name = args.name
|
||||
for c in new_name:
|
||||
if c not in ALLOWED_NAME_CHARS:
|
||||
print(
|
||||
color(
|
||||
@@ -822,8 +905,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
)
|
||||
return 1
|
||||
# Load existing yaml file
|
||||
with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file:
|
||||
raw_contents = raw_file.read()
|
||||
raw_contents = CORE.config_path.read_text(encoding="utf-8")
|
||||
|
||||
yaml = yaml_util.load_yaml(CORE.config_path)
|
||||
if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]:
|
||||
@@ -838,7 +920,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
if match is None:
|
||||
new_raw = re.sub(
|
||||
rf"name:\s+[\"']?{old_name}[\"']?",
|
||||
f'name: "{args.name}"',
|
||||
f'name: "{new_name}"',
|
||||
raw_contents,
|
||||
)
|
||||
else:
|
||||
@@ -858,29 +940,28 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
|
||||
new_raw = re.sub(
|
||||
rf"^(\s+{match.group(1)}):\s+[\"']?{old_name}[\"']?",
|
||||
f'\\1: "{args.name}"',
|
||||
f'\\1: "{new_name}"',
|
||||
raw_contents,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
new_path = os.path.join(CORE.config_dir, args.name + ".yaml")
|
||||
new_path: Path = CORE.config_dir / (new_name + ".yaml")
|
||||
print(
|
||||
f"Updating {color(AnsiFore.CYAN, CORE.config_path)} to {color(AnsiFore.CYAN, new_path)}"
|
||||
f"Updating {color(AnsiFore.CYAN, str(CORE.config_path))} to {color(AnsiFore.CYAN, str(new_path))}"
|
||||
)
|
||||
print()
|
||||
|
||||
with open(new_path, mode="w", encoding="utf-8") as new_file:
|
||||
new_file.write(new_raw)
|
||||
new_path.write_text(new_raw, encoding="utf-8")
|
||||
|
||||
rc = run_external_process("esphome", "config", new_path)
|
||||
rc = run_external_process("esphome", "config", str(new_path))
|
||||
if rc != 0:
|
||||
print(color(AnsiFore.BOLD_RED, "Rename failed. Reverting changes."))
|
||||
os.remove(new_path)
|
||||
new_path.unlink()
|
||||
return 1
|
||||
|
||||
cli_args = [
|
||||
"run",
|
||||
new_path,
|
||||
str(new_path),
|
||||
"--no-logs",
|
||||
"--device",
|
||||
CORE.address,
|
||||
@@ -894,11 +975,11 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||
except KeyboardInterrupt:
|
||||
rc = 1
|
||||
if rc != 0:
|
||||
os.remove(new_path)
|
||||
new_path.unlink()
|
||||
return 1
|
||||
|
||||
if CORE.config_path != new_path:
|
||||
os.remove(CORE.config_path)
|
||||
CORE.config_path.unlink()
|
||||
|
||||
print(color(AnsiFore.BOLD_GREEN, "SUCCESS"))
|
||||
print()
|
||||
@@ -911,6 +992,7 @@ PRE_CONFIG_ACTIONS = {
|
||||
"dashboard": command_dashboard,
|
||||
"vscode": command_vscode,
|
||||
"update-all": command_update_all,
|
||||
"clean-all": command_clean_all,
|
||||
}
|
||||
|
||||
POST_CONFIG_ACTIONS = {
|
||||
@@ -919,9 +1001,9 @@ POST_CONFIG_ACTIONS = {
|
||||
"upload": command_upload,
|
||||
"logs": command_logs,
|
||||
"run": command_run,
|
||||
"clean": command_clean,
|
||||
"clean-mqtt": command_clean_mqtt,
|
||||
"mqtt-fingerprint": command_mqtt_fingerprint,
|
||||
"clean": command_clean,
|
||||
"idedata": command_idedata,
|
||||
"rename": command_rename,
|
||||
"discover": command_discover,
|
||||
@@ -965,6 +1047,24 @@ def parse_args(argv):
|
||||
help="Add a substitution",
|
||||
metavar=("key", "value"),
|
||||
)
|
||||
options_parser.add_argument(
|
||||
"--mdns-address-cache",
|
||||
help="mDNS address cache mapping in format 'hostname=ip1,ip2'",
|
||||
action="append",
|
||||
default=[],
|
||||
)
|
||||
options_parser.add_argument(
|
||||
"--dns-address-cache",
|
||||
help="DNS address cache mapping in format 'hostname=ip1,ip2'",
|
||||
action="append",
|
||||
default=[],
|
||||
)
|
||||
options_parser.add_argument(
|
||||
"--testing-mode",
|
||||
help="Enable testing mode (disables validation checks for grouped component testing)",
|
||||
action="store_true",
|
||||
default=False,
|
||||
)
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description=f"ESPHome {const.__version__}", parents=[options_parser]
|
||||
@@ -1122,6 +1222,13 @@ def parse_args(argv):
|
||||
"configuration", help="Your YAML configuration file(s).", nargs="+"
|
||||
)
|
||||
|
||||
parser_clean_all = subparsers.add_parser(
|
||||
"clean-all", help="Clean all build and platform files."
|
||||
)
|
||||
parser_clean_all.add_argument(
|
||||
"configuration", help="Your YAML configuration directory.", nargs="*"
|
||||
)
|
||||
|
||||
parser_dashboard = subparsers.add_parser(
|
||||
"dashboard", help="Create a simple web server for a dashboard."
|
||||
)
|
||||
@@ -1168,7 +1275,7 @@ def parse_args(argv):
|
||||
|
||||
parser_update = subparsers.add_parser("update-all")
|
||||
parser_update.add_argument(
|
||||
"configuration", help="Your YAML configuration file directories.", nargs="+"
|
||||
"configuration", help="Your YAML configuration file or directory.", nargs="+"
|
||||
)
|
||||
|
||||
parser_idedata = subparsers.add_parser("idedata")
|
||||
@@ -1212,9 +1319,16 @@ def parse_args(argv):
|
||||
|
||||
|
||||
def run_esphome(argv):
|
||||
from esphome.address_cache import AddressCache
|
||||
|
||||
args = parse_args(argv)
|
||||
CORE.dashboard = args.dashboard
|
||||
CORE.testing_mode = args.testing_mode
|
||||
|
||||
# Create address cache from command-line arguments
|
||||
CORE.address_cache = AddressCache.from_cli_args(
|
||||
args.mdns_address_cache, args.dns_address_cache
|
||||
)
|
||||
# Override log level if verbose is set
|
||||
if args.verbose:
|
||||
args.log_level = "DEBUG"
|
||||
@@ -1237,14 +1351,20 @@ def run_esphome(argv):
|
||||
_LOGGER.info("ESPHome %s", const.__version__)
|
||||
|
||||
for conf_path in args.configuration:
|
||||
if any(os.path.basename(conf_path) == x for x in SECRETS_FILES):
|
||||
conf_path = Path(conf_path)
|
||||
if any(conf_path.name == x for x in SECRETS_FILES):
|
||||
_LOGGER.warning("Skipping secrets file %s", conf_path)
|
||||
continue
|
||||
|
||||
CORE.config_path = conf_path
|
||||
CORE.dashboard = args.dashboard
|
||||
|
||||
config = read_config(dict(args.substitution) if args.substitution else {})
|
||||
# For logs command, skip updating external components
|
||||
skip_external = args.command == "logs"
|
||||
config = read_config(
|
||||
dict(args.substitution) if args.substitution else {},
|
||||
skip_external_update=skip_external,
|
||||
)
|
||||
if config is None:
|
||||
return 2
|
||||
CORE.config = config
|
||||
|
||||
142
esphome/address_cache.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""Address cache for DNS and mDNS lookups."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def normalize_hostname(hostname: str) -> str:
"""Normalize hostname for cache lookups.

Removes trailing dots and converts to lowercase.
"""
return hostname.rstrip(".").lower()
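Two quick examples of the normalization above; the hostnames are made up.

assert normalize_hostname("LivingRoom.local.") == "livingroom.local"
assert normalize_hostname("Sensor.Example.COM") == "sensor.example.com"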
|
||||
|
||||
|
||||
class AddressCache:
|
||||
"""Cache for DNS and mDNS address lookups.
|
||||
|
||||
This cache stores pre-resolved addresses from command-line arguments
|
||||
to avoid slow DNS/mDNS lookups during builds.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
mdns_cache: dict[str, list[str]] | None = None,
|
||||
dns_cache: dict[str, list[str]] | None = None,
|
||||
) -> None:
|
||||
"""Initialize the address cache.
|
||||
|
||||
Args:
|
||||
mdns_cache: Pre-populated mDNS addresses (hostname -> IPs)
|
||||
dns_cache: Pre-populated DNS addresses (hostname -> IPs)
|
||||
"""
|
||||
self.mdns_cache = mdns_cache or {}
|
||||
self.dns_cache = dns_cache or {}
|
||||
|
||||
def _get_cached_addresses(
|
||||
self, hostname: str, cache: dict[str, list[str]], cache_type: str
|
||||
) -> list[str] | None:
|
||||
"""Get cached addresses from a specific cache.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up
|
||||
cache: The cache dictionary to check
|
||||
cache_type: Type of cache for logging ("mDNS" or "DNS")
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
normalized = normalize_hostname(hostname)
|
||||
if addresses := cache.get(normalized):
|
||||
_LOGGER.debug("Using %s cache for %s: %s", cache_type, hostname, addresses)
|
||||
return addresses
|
||||
return None
|
||||
|
||||
def get_mdns_addresses(self, hostname: str) -> list[str] | None:
|
||||
"""Get cached mDNS addresses for a hostname.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up (should end with .local)
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
return self._get_cached_addresses(hostname, self.mdns_cache, "mDNS")
|
||||
|
||||
def get_dns_addresses(self, hostname: str) -> list[str] | None:
|
||||
"""Get cached DNS addresses for a hostname.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
return self._get_cached_addresses(hostname, self.dns_cache, "DNS")
|
||||
|
||||
def get_addresses(self, hostname: str) -> list[str] | None:
|
||||
"""Get cached addresses for a hostname.
|
||||
|
||||
Checks mDNS cache for .local domains, DNS cache otherwise.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
normalized = normalize_hostname(hostname)
|
||||
if normalized.endswith(".local"):
|
||||
return self.get_mdns_addresses(hostname)
|
||||
return self.get_dns_addresses(hostname)
|
||||
|
||||
def has_cache(self) -> bool:
|
||||
"""Check if any cache entries exist."""
|
||||
return bool(self.mdns_cache or self.dns_cache)
|
||||
|
||||
@classmethod
|
||||
def from_cli_args(
|
||||
cls, mdns_args: Iterable[str], dns_args: Iterable[str]
|
||||
) -> AddressCache:
|
||||
"""Create cache from command-line arguments.
|
||||
|
||||
Args:
|
||||
mdns_args: List of mDNS cache entries like ['host=ip1,ip2']
|
||||
dns_args: List of DNS cache entries like ['host=ip1,ip2']
|
||||
|
||||
Returns:
|
||||
Configured AddressCache instance
|
||||
"""
|
||||
mdns_cache = cls._parse_cache_args(mdns_args)
|
||||
dns_cache = cls._parse_cache_args(dns_args)
|
||||
return cls(mdns_cache=mdns_cache, dns_cache=dns_cache)
|
||||
|
||||
@staticmethod
|
||||
def _parse_cache_args(cache_args: Iterable[str]) -> dict[str, list[str]]:
|
||||
"""Parse cache arguments into a dictionary.
|
||||
|
||||
Args:
|
||||
cache_args: List of cache mappings like ['host1=ip1,ip2', 'host2=ip3']
|
||||
|
||||
Returns:
|
||||
Dictionary mapping normalized hostnames to list of IP addresses
|
||||
"""
|
||||
cache: dict[str, list[str]] = {}
|
||||
for arg in cache_args:
|
||||
if "=" not in arg:
|
||||
_LOGGER.warning(
|
||||
"Invalid cache format: %s (expected 'hostname=ip1,ip2')", arg
|
||||
)
|
||||
continue
|
||||
hostname, ips = arg.split("=", 1)
|
||||
# Normalize hostname for consistent lookups
|
||||
normalized = normalize_hostname(hostname)
|
||||
cache[normalized] = [ip.strip() for ip in ips.split(",")]
|
||||
return cache
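A short usage sketch of the CLI parsing above, with hypothetical --mdns-address-cache and --dns-address-cache values; hostnames and IPs are illustrative only.

cache = AddressCache.from_cli_args(
    ["kitchen.local=192.168.1.10,192.168.1.11"],
    ["printer.example.com=10.0.0.5"],
)
# .local names hit the mDNS cache, everything else the DNS cache
assert cache.get_addresses("Kitchen.local.") == ["192.168.1.10", "192.168.1.11"]
assert cache.get_addresses("printer.example.com") == ["10.0.0.5"]
assert cache.has_cache()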
|
||||
502
esphome/analyze_memory/__init__.py
Normal file
@@ -0,0 +1,502 @@
|
||||
"""Memory usage analyzer for ESPHome compiled binaries."""
|
||||
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, field
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .const import (
|
||||
CORE_SUBCATEGORY_PATTERNS,
|
||||
DEMANGLED_PATTERNS,
|
||||
ESPHOME_COMPONENT_PATTERN,
|
||||
SECTION_TO_ATTR,
|
||||
SYMBOL_PATTERNS,
|
||||
)
|
||||
from .helpers import (
|
||||
get_component_class_patterns,
|
||||
get_esphome_components,
|
||||
map_section_name,
|
||||
parse_symbol_line,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from esphome.platformio_api import IDEData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# GCC global constructor/destructor prefix annotations
|
||||
_GCC_PREFIX_ANNOTATIONS = {
|
||||
"_GLOBAL__sub_I_": "global constructor for",
|
||||
"_GLOBAL__sub_D_": "global destructor for",
|
||||
}
|
||||
|
||||
# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2)
|
||||
_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
|
||||
|
||||
# C++ runtime patterns for categorization
|
||||
_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"])
|
||||
|
||||
# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.)
|
||||
_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])
|
||||
|
||||
# Regex pattern for parsing readelf section headers
|
||||
# Format: [ #] name type addr off size
|
||||
_READELF_SECTION_PATTERN = re.compile(
|
||||
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
|
||||
)
|
||||
|
||||
# Component category prefixes
|
||||
_COMPONENT_PREFIX_ESPHOME = "[esphome]"
|
||||
_COMPONENT_PREFIX_EXTERNAL = "[external]"
|
||||
_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core"
|
||||
_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api"
|
||||
|
||||
# C++ namespace prefixes
|
||||
_NAMESPACE_ESPHOME = "esphome::"
|
||||
_NAMESPACE_STD = "std::"
|
||||
|
||||
# Type alias for symbol information: (symbol_name, size, component)
|
||||
SymbolInfoType = tuple[str, int, str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MemorySection:
|
||||
"""Represents a memory section with its symbols."""
|
||||
|
||||
name: str
|
||||
symbols: list[SymbolInfoType] = field(default_factory=list)
|
||||
total_size: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class ComponentMemory:
|
||||
"""Tracks memory usage for a component."""
|
||||
|
||||
name: str
|
||||
text_size: int = 0 # Code in flash
|
||||
rodata_size: int = 0 # Read-only data in flash
|
||||
data_size: int = 0 # Initialized data (flash + ram)
|
||||
bss_size: int = 0 # Uninitialized data (ram only)
|
||||
symbol_count: int = 0
|
||||
|
||||
@property
|
||||
def flash_total(self) -> int:
|
||||
"""Total flash usage (text + rodata + data)."""
|
||||
return self.text_size + self.rodata_size + self.data_size
|
||||
|
||||
@property
|
||||
def ram_total(self) -> int:
|
||||
"""Total RAM usage (data + bss)."""
|
||||
return self.data_size + self.bss_size
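A tiny illustration of the flash/RAM accounting defined by the two properties above, with made-up sizes.

# Made-up numbers: text and rodata live in flash, data counts toward both
# flash and RAM, bss is RAM only.
mem = ComponentMemory(
    name="[esphome]api", text_size=4096, rodata_size=512, data_size=128, bss_size=256
)
assert mem.flash_total == 4096 + 512 + 128  # 4736 bytes in flash
assert mem.ram_total == 128 + 256           # 384 bytes in RAM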
|
||||
|
||||
|
||||
class MemoryAnalyzer:
|
||||
"""Analyzes memory usage from ELF files."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
elf_path: str,
|
||||
objdump_path: str | None = None,
|
||||
readelf_path: str | None = None,
|
||||
external_components: set[str] | None = None,
|
||||
idedata: "IDEData | None" = None,
|
||||
) -> None:
|
||||
"""Initialize memory analyzer.
|
||||
|
||||
Args:
|
||||
elf_path: Path to ELF file to analyze
|
||||
objdump_path: Path to objdump binary (auto-detected from idedata if not provided)
|
||||
readelf_path: Path to readelf binary (auto-detected from idedata if not provided)
|
||||
external_components: Set of external component names
|
||||
idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths
|
||||
"""
|
||||
self.elf_path = Path(elf_path)
|
||||
if not self.elf_path.exists():
|
||||
raise FileNotFoundError(f"ELF file not found: {elf_path}")
|
||||
|
||||
# Auto-detect toolchain paths from idedata if not provided
|
||||
if idedata is not None and (objdump_path is None or readelf_path is None):
|
||||
objdump_path = objdump_path or idedata.objdump_path
|
||||
readelf_path = readelf_path or idedata.readelf_path
|
||||
_LOGGER.debug("Using toolchain paths from PlatformIO idedata")
|
||||
|
||||
self.objdump_path = objdump_path or "objdump"
|
||||
self.readelf_path = readelf_path or "readelf"
|
||||
self.external_components = external_components or set()
|
||||
|
||||
self.sections: dict[str, MemorySection] = {}
|
||||
self.components: dict[str, ComponentMemory] = defaultdict(
|
||||
lambda: ComponentMemory("")
|
||||
)
|
||||
self._demangle_cache: dict[str, str] = {}
|
||||
self._uncategorized_symbols: list[tuple[str, str, int]] = []
|
||||
self._esphome_core_symbols: list[
|
||||
tuple[str, str, int]
|
||||
] = [] # Track core symbols
|
||||
self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict(
|
||||
list
|
||||
) # Track symbols for all components
|
||||
|
||||
def analyze(self) -> dict[str, ComponentMemory]:
|
||||
"""Analyze the ELF file and return component memory usage."""
|
||||
self._parse_sections()
|
||||
self._parse_symbols()
|
||||
self._categorize_symbols()
|
||||
return dict(self.components)
|
||||
|
||||
def _parse_sections(self) -> None:
|
||||
"""Parse section headers from ELF file."""
|
||||
result = subprocess.run(
|
||||
[self.readelf_path, "-S", str(self.elf_path)],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
|
||||
# Parse section headers
|
||||
for line in result.stdout.splitlines():
|
||||
# Look for section entries
|
||||
if not (match := _READELF_SECTION_PATTERN.match(line)):
|
||||
continue
|
||||
|
||||
section_name = match.group(1)
|
||||
size_hex = match.group(2)
|
||||
size = int(size_hex, 16)
|
||||
|
||||
# Map to standard section name
|
||||
mapped_section = map_section_name(section_name)
|
||||
if not mapped_section:
|
||||
continue
|
||||
|
||||
if mapped_section not in self.sections:
|
||||
self.sections[mapped_section] = MemorySection(mapped_section)
|
||||
self.sections[mapped_section].total_size += size
|
||||
|
||||
def _parse_symbols(self) -> None:
|
||||
"""Parse symbols from ELF file."""
|
||||
result = subprocess.run(
|
||||
[self.objdump_path, "-t", str(self.elf_path)],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
|
||||
# Track seen addresses to avoid duplicates
|
||||
seen_addresses: set[str] = set()
|
||||
|
||||
for line in result.stdout.splitlines():
|
||||
if not (symbol_info := parse_symbol_line(line)):
|
||||
continue
|
||||
|
||||
section, name, size, address = symbol_info
|
||||
|
||||
# Skip duplicate symbols at the same address (e.g., C1/C2 constructors)
|
||||
if address in seen_addresses or section not in self.sections:
|
||||
continue
|
||||
|
||||
self.sections[section].symbols.append((name, size, ""))
|
||||
seen_addresses.add(address)
|
||||
|
||||
def _categorize_symbols(self) -> None:
|
||||
"""Categorize symbols by component."""
|
||||
# First, collect all unique symbol names for batch demangling
|
||||
all_symbols = {
|
||||
symbol_name
|
||||
for section in self.sections.values()
|
||||
for symbol_name, _, _ in section.symbols
|
||||
}
|
||||
|
||||
# Batch demangle all symbols at once
|
||||
self._batch_demangle_symbols(list(all_symbols))
|
||||
|
||||
# Now categorize with cached demangled names
|
||||
for section_name, section in self.sections.items():
|
||||
for symbol_name, size, _ in section.symbols:
|
||||
component = self._identify_component(symbol_name)
|
||||
|
||||
if component not in self.components:
|
||||
self.components[component] = ComponentMemory(component)
|
||||
|
||||
comp_mem = self.components[component]
|
||||
comp_mem.symbol_count += 1
|
||||
|
||||
# Update the appropriate size attribute based on section
|
||||
if attr_name := SECTION_TO_ATTR.get(section_name):
|
||||
setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size)
|
||||
|
||||
# Track uncategorized symbols
|
||||
if component == "other" and size > 0:
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
self._uncategorized_symbols.append((symbol_name, demangled, size))
|
||||
|
||||
# Track ESPHome core symbols for detailed analysis
|
||||
if component == _COMPONENT_CORE and size > 0:
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
self._esphome_core_symbols.append((symbol_name, demangled, size))
|
||||
|
||||
# Track all component symbols for detailed analysis
|
||||
if size > 0:
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
self._component_symbols[component].append(
|
||||
(symbol_name, demangled, size)
|
||||
)
|
||||
|
||||
def _identify_component(self, symbol_name: str) -> str:
|
||||
"""Identify which component a symbol belongs to."""
|
||||
# Demangle C++ names if needed
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
|
||||
# Check for special component classes first (before namespace pattern)
|
||||
# This handles cases like esphome::ESPHomeOTAComponent which should map to ota
|
||||
if _NAMESPACE_ESPHOME in demangled:
|
||||
# Check for special component classes that include component name in the class
|
||||
# For example: esphome::ESPHomeOTAComponent -> ota component
|
||||
for component_name in get_esphome_components():
|
||||
patterns = get_component_class_patterns(component_name)
|
||||
if any(pattern in demangled for pattern in patterns):
|
||||
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
|
||||
|
||||
# Check for ESPHome component namespaces
|
||||
match = ESPHOME_COMPONENT_PATTERN.search(demangled)
|
||||
if match:
|
||||
component_name = match.group(1)
|
||||
# Strip trailing underscore if present (e.g., switch_ -> switch)
|
||||
component_name = component_name.rstrip("_")
|
||||
|
||||
# Check if this is an actual component in the components directory
|
||||
if component_name in get_esphome_components():
|
||||
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
|
||||
# Check if this is a known external component from the config
|
||||
if component_name in self.external_components:
|
||||
return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
|
||||
# Everything else in esphome:: namespace is core
|
||||
return _COMPONENT_CORE
|
||||
|
||||
# Check for esphome core namespace (no component namespace)
|
||||
if _NAMESPACE_ESPHOME in demangled:
|
||||
# If no component match found, it's core
|
||||
return _COMPONENT_CORE
|
||||
|
||||
# Check against symbol patterns
|
||||
for component, patterns in SYMBOL_PATTERNS.items():
|
||||
if any(pattern in symbol_name for pattern in patterns):
|
||||
return component
|
||||
|
||||
# Check against demangled patterns
|
||||
for component, patterns in DEMANGLED_PATTERNS.items():
|
||||
if any(pattern in demangled for pattern in patterns):
|
||||
return component
|
||||
|
||||
# Special cases that need more complex logic
|
||||
|
||||
# Check if spi_flash vs spi_driver
|
||||
if "spi_" in symbol_name or "SPI" in symbol_name:
|
||||
return "spi_flash" if "spi_flash" in symbol_name else "spi_driver"
|
||||
|
||||
# libc special printf variants
|
||||
if (
|
||||
symbol_name.startswith("_")
|
||||
and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
|
||||
in _LIBC_PRINTF_SCANF_FAMILY
|
||||
):
|
||||
return "libc"
|
||||
|
||||
# Track uncategorized symbols for analysis
|
||||
return "other"
|
||||
|
||||
def _batch_demangle_symbols(self, symbols: list[str]) -> None:
|
||||
"""Batch demangle C++ symbol names for efficiency."""
|
||||
if not symbols:
|
||||
return
|
||||
|
||||
# Try to find the appropriate c++filt for the platform
|
||||
cppfilt_cmd = "c++filt"
|
||||
|
||||
_LOGGER.info("Demangling %d symbols", len(symbols))
|
||||
_LOGGER.debug("objdump_path = %s", self.objdump_path)
|
||||
|
||||
# Check if we have a toolchain-specific c++filt
|
||||
if self.objdump_path and self.objdump_path != "objdump":
|
||||
# Replace objdump with c++filt in the path
|
||||
potential_cppfilt = self.objdump_path.replace("objdump", "c++filt")
|
||||
_LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt)
|
||||
if Path(potential_cppfilt).exists():
|
||||
cppfilt_cmd = potential_cppfilt
|
||||
_LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd)
|
||||
else:
|
||||
_LOGGER.info(
|
||||
"✗ Toolchain c++filt not found at %s, using system c++filt",
|
||||
potential_cppfilt,
|
||||
)
|
||||
else:
|
||||
_LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path)
|
||||
|
||||
# Strip GCC optimization suffixes and prefixes before demangling
|
||||
# Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt
|
||||
# Prefixes like _GLOBAL__sub_I_ need to be removed and tracked
|
||||
symbols_stripped: list[str] = []
|
||||
symbols_prefixes: list[str] = [] # Track removed prefixes
|
||||
for symbol in symbols:
|
||||
# Remove GCC optimization markers
|
||||
stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)
|
||||
|
||||
# Handle GCC global constructor/initializer prefixes
|
||||
# _GLOBAL__sub_I_<mangled> -> extract <mangled> for demangling
|
||||
prefix = ""
|
||||
for gcc_prefix in _GCC_PREFIX_ANNOTATIONS:
|
||||
if stripped.startswith(gcc_prefix):
|
||||
prefix = gcc_prefix
|
||||
stripped = stripped[len(prefix) :]
|
||||
break
|
||||
|
||||
symbols_stripped.append(stripped)
|
||||
symbols_prefixes.append(prefix)
|
||||
|
||||
try:
|
||||
# Send all symbols to c++filt at once
|
||||
result = subprocess.run(
|
||||
[cppfilt_cmd],
|
||||
input="\n".join(symbols_stripped),
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=False,
|
||||
)
|
||||
except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e:
|
||||
# On error, cache originals
|
||||
_LOGGER.warning("Failed to batch demangle symbols: %s", e)
|
||||
for symbol in symbols:
|
||||
self._demangle_cache[symbol] = symbol
|
||||
return
|
||||
|
||||
if result.returncode != 0:
|
||||
_LOGGER.warning(
|
||||
"c++filt exited with code %d: %s",
|
||||
result.returncode,
|
||||
result.stderr[:200] if result.stderr else "(no error output)",
|
||||
)
|
||||
# Cache originals on failure
|
||||
for symbol in symbols:
|
||||
self._demangle_cache[symbol] = symbol
|
||||
return
|
||||
|
||||
# Process demangled output
|
||||
self._process_demangled_output(
|
||||
symbols, symbols_stripped, symbols_prefixes, result.stdout, cppfilt_cmd
|
||||
)
|
||||
|
||||
def _process_demangled_output(
|
||||
self,
|
||||
symbols: list[str],
|
||||
symbols_stripped: list[str],
|
||||
symbols_prefixes: list[str],
|
||||
demangled_output: str,
|
||||
cppfilt_cmd: str,
|
||||
) -> None:
|
||||
"""Process demangled symbol output and populate cache.
|
||||
|
||||
Args:
|
||||
symbols: Original symbol names
|
||||
symbols_stripped: Stripped symbol names sent to c++filt
|
||||
symbols_prefixes: Removed prefixes to restore
|
||||
demangled_output: Output from c++filt
|
||||
cppfilt_cmd: Path to c++filt command (for logging)
|
||||
"""
|
||||
demangled_lines = demangled_output.strip().split("\n")
|
||||
failed_count = 0
|
||||
|
||||
for original, stripped, prefix, demangled in zip(
|
||||
symbols, symbols_stripped, symbols_prefixes, demangled_lines
|
||||
):
|
||||
# Add back any prefix that was removed
|
||||
demangled = self._restore_symbol_prefix(prefix, stripped, demangled)
|
||||
|
||||
# If we stripped a suffix, add it back to the demangled name for clarity
|
||||
if original != stripped and not prefix:
|
||||
demangled = self._restore_symbol_suffix(original, demangled)
|
||||
|
||||
self._demangle_cache[original] = demangled
|
||||
|
||||
# Log symbols that failed to demangle (stayed the same as stripped version)
|
||||
if stripped == demangled and stripped.startswith("_Z"):
|
||||
failed_count += 1
|
||||
if failed_count <= 5: # Only log first 5 failures
|
||||
_LOGGER.warning("Failed to demangle: %s", original)
|
||||
|
||||
if failed_count == 0:
|
||||
_LOGGER.info("Successfully demangled all %d symbols", len(symbols))
|
||||
return
|
||||
|
||||
_LOGGER.warning(
|
||||
"Failed to demangle %d/%d symbols using %s",
|
||||
failed_count,
|
||||
len(symbols),
|
||||
cppfilt_cmd,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str:
|
||||
"""Restore prefix that was removed before demangling.
|
||||
|
||||
Args:
|
||||
prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_")
|
||||
stripped: Stripped symbol name
|
||||
demangled: Demangled symbol name
|
||||
|
||||
Returns:
|
||||
Demangled name with prefix restored/annotated
|
||||
"""
|
||||
if not prefix:
|
||||
return demangled
|
||||
|
||||
# Successfully demangled - add descriptive prefix
|
||||
if demangled != stripped and (
|
||||
annotation := _GCC_PREFIX_ANNOTATIONS.get(prefix)
|
||||
):
|
||||
return f"[{annotation}: {demangled}]"
|
||||
|
||||
# Failed to demangle - restore original prefix
|
||||
return prefix + demangled
|
||||
|
||||
@staticmethod
|
||||
def _restore_symbol_suffix(original: str, demangled: str) -> str:
|
||||
"""Restore GCC optimization suffix that was removed before demangling.
|
||||
|
||||
Args:
|
||||
original: Original symbol name with suffix
|
||||
demangled: Demangled symbol name without suffix
|
||||
|
||||
Returns:
|
||||
Demangled name with suffix annotation
|
||||
"""
|
||||
if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
|
||||
return f"{demangled} [{suffix_match.group(1)}]"
|
||||
return demangled
|
||||
|
||||
def _demangle_symbol(self, symbol: str) -> str:
|
||||
"""Get demangled C++ symbol name from cache."""
|
||||
return self._demangle_cache.get(symbol, symbol)
|
||||
|
||||
def _categorize_esphome_core_symbol(self, demangled: str) -> str:
|
||||
"""Categorize ESPHome core symbols into subcategories."""
|
||||
# Special patterns that need to be checked separately
|
||||
if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS):
|
||||
return "C++ Runtime (vtables/RTTI)"
|
||||
|
||||
if demangled.startswith(_NAMESPACE_STD):
|
||||
return "C++ STL"
|
||||
|
||||
# Check against patterns from const.py
|
||||
for category, patterns in CORE_SUBCATEGORY_PATTERNS.items():
|
||||
if any(pattern in demangled for pattern in patterns):
|
||||
return category
|
||||
|
||||
return "Other Core"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from .cli import main
|
||||
|
||||
main()
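Before the new files below, a minimal sketch of driving the analyzer programmatically (editorial, not part of the diff; the ELF path is a placeholder):

from esphome.analyze_memory import MemoryAnalyzer

analyzer = MemoryAnalyzer("/path/to/firmware.elf")  # placeholder path
components = analyzer.analyze()
for name, mem in sorted(components.items(), key=lambda kv: kv[1].flash_total, reverse=True)[:10]:
    print(f"{name:<30} flash={mem.flash_total:,} B  ram={mem.ram_total:,} B")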
|
||||
6
esphome/analyze_memory/__main__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Main entry point for running the memory analyzer as a module."""
|
||||
|
||||
from .cli import main
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
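This entry point only makes the package runnable as a module; per the usage text in cli.py below, an invocation would look like (editorial note):

# python -m esphome.analyze_memory ~/.esphome/build/my-device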
|
||||
408
esphome/analyze_memory/cli.py
Normal file
@@ -0,0 +1,408 @@
|
||||
"""CLI interface for memory analysis with report generation."""
|
||||
|
||||
from collections import defaultdict
|
||||
import sys
|
||||
|
||||
from . import (
|
||||
_COMPONENT_API,
|
||||
_COMPONENT_CORE,
|
||||
_COMPONENT_PREFIX_ESPHOME,
|
||||
_COMPONENT_PREFIX_EXTERNAL,
|
||||
MemoryAnalyzer,
|
||||
)
|
||||
|
||||
|
||||
class MemoryAnalyzerCLI(MemoryAnalyzer):
|
||||
"""Memory analyzer with CLI-specific report generation."""
|
||||
|
||||
# Column width constants
|
||||
COL_COMPONENT: int = 29
|
||||
COL_FLASH_TEXT: int = 14
|
||||
COL_FLASH_DATA: int = 14
|
||||
COL_RAM_DATA: int = 12
|
||||
COL_RAM_BSS: int = 12
|
||||
COL_TOTAL_FLASH: int = 15
|
||||
COL_TOTAL_RAM: int = 12
|
||||
COL_SEPARATOR: int = 3 # " | "
|
||||
|
||||
# Core analysis column widths
|
||||
COL_CORE_SUBCATEGORY: int = 30
|
||||
COL_CORE_SIZE: int = 12
|
||||
COL_CORE_COUNT: int = 6
|
||||
COL_CORE_PERCENT: int = 10
|
||||
|
||||
# Calculate table width once at class level
|
||||
TABLE_WIDTH: int = (
|
||||
COL_COMPONENT
|
||||
+ COL_SEPARATOR
|
||||
+ COL_FLASH_TEXT
|
||||
+ COL_SEPARATOR
|
||||
+ COL_FLASH_DATA
|
||||
+ COL_SEPARATOR
|
||||
+ COL_RAM_DATA
|
||||
+ COL_SEPARATOR
|
||||
+ COL_RAM_BSS
|
||||
+ COL_SEPARATOR
|
||||
+ COL_TOTAL_FLASH
|
||||
+ COL_SEPARATOR
|
||||
+ COL_TOTAL_RAM
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _make_separator_line(*widths: int) -> str:
|
||||
"""Create a separator line with given column widths.
|
||||
|
||||
Args:
|
||||
widths: Column widths to create separators for
|
||||
|
||||
Returns:
|
||||
Separator line like "----+---------+-----"
|
||||
"""
|
||||
return "-+-".join("-" * width for width in widths)
|
||||
|
||||
# Pre-computed separator lines
|
||||
MAIN_TABLE_SEPARATOR: str = _make_separator_line(
|
||||
COL_COMPONENT,
|
||||
COL_FLASH_TEXT,
|
||||
COL_FLASH_DATA,
|
||||
COL_RAM_DATA,
|
||||
COL_RAM_BSS,
|
||||
COL_TOTAL_FLASH,
|
||||
COL_TOTAL_RAM,
|
||||
)
|
||||
|
||||
CORE_TABLE_SEPARATOR: str = _make_separator_line(
|
||||
COL_CORE_SUBCATEGORY,
|
||||
COL_CORE_SIZE,
|
||||
COL_CORE_COUNT,
|
||||
COL_CORE_PERCENT,
|
||||
)
|
||||
|
||||
def generate_report(self, detailed: bool = False) -> str:
|
||||
"""Generate a formatted memory report."""
|
||||
components = sorted(
|
||||
self.components.items(), key=lambda x: x[1].flash_total, reverse=True
|
||||
)
|
||||
|
||||
# Calculate totals
|
||||
total_flash = sum(c.flash_total for _, c in components)
|
||||
total_ram = sum(c.ram_total for _, c in components)
|
||||
|
||||
# Build report
|
||||
lines: list[str] = []
|
||||
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("Component Memory Analysis".center(self.TABLE_WIDTH))
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("")
|
||||
|
||||
# Main table - fixed column widths
|
||||
lines.append(
|
||||
f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}"
|
||||
)
|
||||
lines.append(self.MAIN_TABLE_SEPARATOR)
|
||||
|
||||
for name, mem in components:
|
||||
if mem.flash_total > 0 or mem.ram_total > 0:
|
||||
flash_rodata = mem.rodata_size + mem.data_size
|
||||
lines.append(
|
||||
f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | "
|
||||
f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | "
|
||||
f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B"
|
||||
)
|
||||
|
||||
lines.append(self.MAIN_TABLE_SEPARATOR)
|
||||
lines.append(
|
||||
f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | "
|
||||
f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | "
|
||||
f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B"
|
||||
)
|
||||
|
||||
# Top consumers
|
||||
lines.append("")
|
||||
lines.append("Top Flash Consumers:")
|
||||
for i, (name, mem) in enumerate(components[:25]):
|
||||
if mem.flash_total > 0:
|
||||
percentage = (
|
||||
(mem.flash_total / total_flash * 100) if total_flash > 0 else 0
|
||||
)
|
||||
lines.append(
|
||||
f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash"
|
||||
)
|
||||
|
||||
lines.append("")
|
||||
lines.append("Top RAM Consumers:")
|
||||
ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True)
|
||||
for i, (name, mem) in enumerate(ram_components[:25]):
|
||||
if mem.ram_total > 0:
|
||||
percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0
|
||||
lines.append(
|
||||
f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM"
|
||||
)
|
||||
|
||||
lines.append("")
|
||||
lines.append(
|
||||
"Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included."
|
||||
)
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
|
||||
# Add ESPHome core detailed analysis if there are core symbols
|
||||
if self._esphome_core_symbols:
|
||||
lines.append("")
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append(
|
||||
f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH)
|
||||
)
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("")
|
||||
|
||||
# Group core symbols by subcategory
|
||||
core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict(
|
||||
list
|
||||
)
|
||||
|
||||
for symbol, demangled, size in self._esphome_core_symbols:
|
||||
# Categorize based on demangled name patterns
|
||||
subcategory = self._categorize_esphome_core_symbol(demangled)
|
||||
core_subcategories[subcategory].append((symbol, demangled, size))
|
||||
|
||||
# Sort subcategories by total size
|
||||
sorted_subcategories = sorted(
|
||||
[
|
||||
(name, symbols, sum(s[2] for s in symbols))
|
||||
for name, symbols in core_subcategories.items()
|
||||
],
|
||||
key=lambda x: x[2],
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
lines.append(
|
||||
f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | "
|
||||
f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}"
|
||||
)
|
||||
lines.append(self.CORE_TABLE_SEPARATOR)
|
||||
|
||||
core_total = sum(size for _, _, size in self._esphome_core_symbols)
|
||||
|
||||
for subcategory, symbols, total_size in sorted_subcategories:
|
||||
percentage = (total_size / core_total * 100) if core_total > 0 else 0
|
||||
lines.append(
|
||||
f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | "
|
||||
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
|
||||
)
|
||||
|
||||
# Top 15 largest core symbols
|
||||
lines.append("")
|
||||
lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
|
||||
sorted_core_symbols = sorted(
|
||||
self._esphome_core_symbols, key=lambda x: x[2], reverse=True
|
||||
)
|
||||
|
||||
for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]):
|
||||
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
|
||||
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
|
||||
# Add detailed analysis for top ESPHome and external components
|
||||
esphome_components = [
|
||||
(name, mem)
|
||||
for name, mem in components
|
||||
if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE
|
||||
]
|
||||
external_components = [
|
||||
(name, mem)
|
||||
for name, mem in components
|
||||
if name.startswith(_COMPONENT_PREFIX_EXTERNAL)
|
||||
]
|
||||
|
||||
top_esphome_components = sorted(
|
||||
esphome_components, key=lambda x: x[1].flash_total, reverse=True
|
||||
)[:30]
|
||||
|
||||
# Include all external components (they're usually important)
|
||||
top_external_components = sorted(
|
||||
external_components, key=lambda x: x[1].flash_total, reverse=True
|
||||
)
|
||||
|
||||
# Check if API component exists and ensure it's included
|
||||
api_component = None
|
||||
for name, mem in components:
|
||||
if name == _COMPONENT_API:
|
||||
api_component = (name, mem)
|
||||
break
|
||||
|
||||
# Combine all components to analyze: top ESPHome + all external + API if not already included
|
||||
components_to_analyze = list(top_esphome_components) + list(
|
||||
top_external_components
|
||||
)
|
||||
if api_component and api_component not in components_to_analyze:
|
||||
components_to_analyze.append(api_component)
|
||||
|
||||
if components_to_analyze:
|
||||
for comp_name, comp_mem in components_to_analyze:
|
||||
if not (comp_symbols := self._component_symbols.get(comp_name, [])):
|
||||
continue
|
||||
lines.append("")
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH))
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("")
|
||||
|
||||
# Sort symbols by size
|
||||
sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True)
|
||||
|
||||
lines.append(f"Total symbols: {len(sorted_symbols)}")
|
||||
lines.append(f"Total size: {comp_mem.flash_total:,} B")
|
||||
lines.append("")
|
||||
|
||||
# Show all symbols > 100 bytes for better visibility
|
||||
large_symbols = [
|
||||
(sym, dem, size) for sym, dem, size in sorted_symbols if size > 100
|
||||
]
|
||||
|
||||
lines.append(
|
||||
f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):"
|
||||
)
|
||||
for i, (symbol, demangled, size) in enumerate(large_symbols):
|
||||
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
|
||||
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
|
||||
"""Dump uncategorized symbols for analysis."""
|
||||
# Sort by size descending
|
||||
sorted_symbols = sorted(
|
||||
self._uncategorized_symbols, key=lambda x: x[2], reverse=True
|
||||
)
|
||||
|
||||
lines = ["Uncategorized Symbols Analysis", "=" * 80]
|
||||
lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}")
|
||||
lines.append(
|
||||
f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes"
|
||||
)
|
||||
lines.append("")
|
||||
lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled")
|
||||
lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40)
|
||||
|
||||
for symbol, demangled, size in sorted_symbols[:100]: # Top 100
|
||||
demangled_display = (
|
||||
demangled[:100] if symbol != demangled else "[not demangled]"
|
||||
)
|
||||
lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}")
|
||||
|
||||
if len(sorted_symbols) > 100:
|
||||
lines.append(f"\n... and {len(sorted_symbols) - 100} more symbols")
|
||||
|
||||
content = "\n".join(lines)
|
||||
|
||||
if output_file:
|
||||
with open(output_file, "w", encoding="utf-8") as f:
|
||||
f.write(content)
|
||||
else:
|
||||
print(content)
|
||||
|
||||
|
||||
def analyze_elf(
|
||||
elf_path: str,
|
||||
objdump_path: str | None = None,
|
||||
readelf_path: str | None = None,
|
||||
detailed: bool = False,
|
||||
external_components: set[str] | None = None,
|
||||
) -> str:
|
||||
"""Analyze an ELF file and return a memory report."""
|
||||
analyzer = MemoryAnalyzerCLI(
|
||||
elf_path, objdump_path, readelf_path, external_components
|
||||
)
|
||||
analyzer.analyze()
|
||||
return analyzer.generate_report(detailed)
|
||||
|
||||
|
||||
def main():
|
||||
"""CLI entrypoint for memory analysis."""
|
||||
if len(sys.argv) < 2:
|
||||
print("Usage: python -m esphome.analyze_memory <build_directory>")
|
||||
print("\nAnalyze memory usage from an ESPHome build directory.")
|
||||
print("The build directory should contain firmware.elf and idedata will be")
|
||||
print("loaded from ~/.esphome/.internal/idedata/<device>.json")
|
||||
print("\nExamples:")
|
||||
print(" python -m esphome.analyze_memory ~/.esphome/build/my-device")
|
||||
print(" python -m esphome.analyze_memory .esphome/build/my-device")
|
||||
print(" python -m esphome.analyze_memory my-device # Short form")
|
||||
sys.exit(1)
|
||||
|
||||
build_dir = sys.argv[1]
|
||||
|
||||
# Load build directory
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from esphome.platformio_api import IDEData
|
||||
|
||||
build_path = Path(build_dir)
|
||||
|
||||
# If no path separator in name, assume it's a device name
|
||||
if "/" not in build_dir and not build_path.is_dir():
|
||||
# Try current directory first
|
||||
cwd_path = Path.cwd() / ".esphome" / "build" / build_dir
|
||||
if cwd_path.is_dir():
|
||||
build_path = cwd_path
|
||||
print(f"Using build directory: {build_path}", file=sys.stderr)
|
||||
else:
|
||||
# Fall back to home directory
|
||||
build_path = Path.home() / ".esphome" / "build" / build_dir
|
||||
print(f"Using build directory: {build_path}", file=sys.stderr)
|
||||
|
||||
if not build_path.is_dir():
|
||||
print(f"Error: {build_path} is not a directory", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find firmware.elf
|
||||
elf_file = None
|
||||
for elf_candidate in [
|
||||
build_path / "firmware.elf",
|
||||
build_path / ".pioenvs" / build_path.name / "firmware.elf",
|
||||
]:
|
||||
if elf_candidate.exists():
|
||||
elf_file = str(elf_candidate)
|
||||
break
|
||||
|
||||
if not elf_file:
|
||||
print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find idedata.json - check current directory first, then home
|
||||
device_name = build_path.name
|
||||
idedata_candidates = [
|
||||
Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json",
|
||||
Path.home() / ".esphome" / "idedata" / f"{device_name}.json",
|
||||
]
|
||||
|
||||
idedata = None
|
||||
for idedata_path in idedata_candidates:
|
||||
if not idedata_path.exists():
|
||||
continue
|
||||
try:
|
||||
with open(idedata_path, encoding="utf-8") as f:
|
||||
raw_data = json.load(f)
|
||||
idedata = IDEData(raw_data)
|
||||
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
|
||||
break
|
||||
except (json.JSONDecodeError, OSError) as e:
|
||||
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
|
||||
|
||||
if not idedata:
|
||||
print(
|
||||
f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata)
|
||||
analyzer.analyze()
|
||||
report = analyzer.generate_report()
|
||||
print(report)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
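A sketch of the functional entry point defined above (editorial, not part of the diff; the ELF path is a placeholder, and the toolchain binaries fall back to plain "objdump"/"readelf"/"c++filt" lookups on PATH):

from esphome.analyze_memory.cli import analyze_elf

report = analyze_elf("/path/to/firmware.elf")  # placeholder path
print(report)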
|
||||
903
esphome/analyze_memory/const.py
Normal file
@@ -0,0 +1,903 @@
|
||||
"""Constants for memory analysis symbol pattern matching."""
|
||||
|
||||
import re
|
||||
|
||||
# Pattern to extract ESPHome component namespaces dynamically
|
||||
ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::")
|
||||
|
||||
# Section mapping for ELF file sections
|
||||
# Maps standard section names to their various platform-specific variants
|
||||
SECTION_MAPPING = {
|
||||
".text": frozenset([".text", ".iram"]),
|
||||
".rodata": frozenset([".rodata"]),
|
||||
".data": frozenset([".data", ".dram"]),
|
||||
".bss": frozenset([".bss"]),
|
||||
}
|
||||
|
||||
# Section to ComponentMemory attribute mapping
|
||||
# Maps section names to the attribute name in ComponentMemory dataclass
|
||||
SECTION_TO_ATTR = {
|
||||
".text": "text_size",
|
||||
".rodata": "rodata_size",
|
||||
".data": "data_size",
|
||||
".bss": "bss_size",
|
||||
}
|
||||
|
||||
# Component identification rules
|
||||
# Symbol patterns: patterns found in raw symbol names
|
||||
SYMBOL_PATTERNS = {
|
||||
"freertos": [
|
||||
"vTask",
|
||||
"xTask",
|
||||
"xQueue",
|
||||
"pvPort",
|
||||
"vPort",
|
||||
"uxTask",
|
||||
"pcTask",
|
||||
"prvTimerTask",
|
||||
"prvAddNewTaskToReadyList",
|
||||
"pxReadyTasksLists",
|
||||
"prvAddCurrentTaskToDelayedList",
|
||||
"xEventGroupWaitBits",
|
||||
"xRingbufferSendFromISR",
|
||||
"prvSendItemDoneNoSplit",
|
||||
"prvReceiveGeneric",
|
||||
"prvSendAcquireGeneric",
|
||||
"prvCopyItemAllowSplit",
|
||||
"xEventGroup",
|
||||
"xRingbuffer",
|
||||
"prvSend",
|
||||
"prvReceive",
|
||||
"prvCopy",
|
||||
"xPort",
|
||||
"ulTaskGenericNotifyTake",
|
||||
"prvIdleTask",
|
||||
"prvInitialiseNewTask",
|
||||
"prvIsYieldRequiredSMP",
|
||||
"prvGetItemByteBuf",
|
||||
"prvInitializeNewRingbuffer",
|
||||
"prvAcquireItemNoSplit",
|
||||
"prvNotifyQueueSetContainer",
|
||||
"ucStaticTimerQueueStorage",
|
||||
"eTaskGetState",
|
||||
"main_task",
|
||||
"do_system_init_fn",
|
||||
"xSemaphoreCreateGenericWithCaps",
|
||||
"vListInsert",
|
||||
"uxListRemove",
|
||||
"vRingbufferReturnItem",
|
||||
"vRingbufferReturnItemFromISR",
|
||||
"prvCheckItemFitsByteBuffer",
|
||||
"prvGetCurMaxSizeAllowSplit",
|
||||
"tick_hook",
|
||||
"sys_sem_new",
|
||||
"sys_arch_mbox_fetch",
|
||||
"sys_arch_sem_wait",
|
||||
"prvDeleteTCB",
|
||||
"vQueueDeleteWithCaps",
|
||||
"vRingbufferDeleteWithCaps",
|
||||
"vSemaphoreDeleteWithCaps",
|
||||
"prvCheckItemAvail",
|
||||
"prvCheckTaskCanBeScheduledSMP",
|
||||
"prvGetCurMaxSizeNoSplit",
|
||||
"prvResetNextTaskUnblockTime",
|
||||
"prvReturnItemByteBuf",
|
||||
"vApplicationStackOverflowHook",
|
||||
"vApplicationGetIdleTaskMemory",
|
||||
"sys_init",
|
||||
"sys_mbox_new",
|
||||
"sys_arch_mbox_tryfetch",
|
||||
],
|
||||
"xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"],
|
||||
"heap": ["heap_", "multi_heap"],
|
||||
"spi_flash": ["spi_flash"],
|
||||
"rtc": ["rtc_", "rtcio_ll_"],
|
||||
"gpio_driver": ["gpio_", "pins"],
|
||||
"uart_driver": ["uart", "_uart", "UART"],
|
||||
"timer": ["timer_", "esp_timer"],
|
||||
"peripherals": ["periph_", "periman"],
|
||||
"network_stack": [
|
||||
"vj_compress",
|
||||
"raw_sendto",
|
||||
"raw_input",
|
||||
"etharp_",
|
||||
"icmp_input",
|
||||
"socket_ipv6",
|
||||
"ip_napt",
|
||||
"socket_ipv4_multicast",
|
||||
"socket_ipv6_multicast",
|
||||
"netconn_",
|
||||
"recv_raw",
|
||||
"accept_function",
|
||||
"netconn_recv_data",
|
||||
"netconn_accept",
|
||||
"netconn_write_vectors_partly",
|
||||
"netconn_drain",
|
||||
"raw_connect",
|
||||
"raw_bind",
|
||||
"icmp_send_response",
|
||||
"sockets",
|
||||
"icmp_dest_unreach",
|
||||
"inet_chksum_pseudo",
|
||||
"alloc_socket",
|
||||
"done_socket",
|
||||
"set_global_fd_sets",
|
||||
"inet_chksum_pbuf",
|
||||
"tryget_socket_unconn_locked",
|
||||
"tryget_socket_unconn",
|
||||
"cs_create_ctrl_sock",
|
||||
"netbuf_alloc",
|
||||
],
|
||||
"ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"],
|
||||
"wifi_stack": [
|
||||
"ieee80211",
|
||||
"hostap",
|
||||
"sta_",
|
||||
"ap_",
|
||||
"scan_",
|
||||
"wifi_",
|
||||
"wpa_",
|
||||
"wps_",
|
||||
"esp_wifi",
|
||||
"cnx_",
|
||||
"wpa3_",
|
||||
"sae_",
|
||||
"wDev_",
|
||||
"ic_",
|
||||
"mac_",
|
||||
"esf_buf",
|
||||
"gWpaSm",
|
||||
"sm_WPA",
|
||||
"eapol_",
|
||||
"owe_",
|
||||
"wifiLowLevelInit",
|
||||
"s_do_mapping",
|
||||
"gScanStruct",
|
||||
"ppSearchTxframe",
|
||||
"ppMapWaitTxq",
|
||||
"ppFillAMPDUBar",
|
||||
"ppCheckTxConnTrafficIdle",
|
||||
"ppCalTkipMic",
|
||||
],
|
||||
"bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"],
|
||||
"wifi_bt_coex": ["coex"],
|
||||
"bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"],
|
||||
"bluedroid_bt": [
|
||||
"bluedroid",
|
||||
"btc_",
|
||||
"bta_",
|
||||
"btm_",
|
||||
"btu_",
|
||||
"BTM_",
|
||||
"GATT",
|
||||
"L2CA_",
|
||||
"smp_",
|
||||
"gatts_",
|
||||
"attp_",
|
||||
"l2cu_",
|
||||
"l2cb",
|
||||
"smp_cb",
|
||||
"BTA_GATTC_",
|
||||
"SMP_",
|
||||
"BTU_",
|
||||
"BTA_Dm",
|
||||
"GAP_Ble",
|
||||
"BT_tx_if",
|
||||
"host_recv_pkt_cb",
|
||||
"saved_local_oob_data",
|
||||
"string_to_bdaddr",
|
||||
"string_is_bdaddr",
|
||||
"CalConnectParamTimeout",
|
||||
"transmit_fragment",
|
||||
"transmit_data",
|
||||
"event_command_ready",
|
||||
"read_command_complete_header",
|
||||
"parse_read_local_extended_features_response",
|
||||
"parse_read_local_version_info_response",
|
||||
"should_request_high",
|
||||
"btdm_wakeup_request",
|
||||
"BTA_SetAttributeValue",
|
||||
"BTA_EnableBluetooth",
|
||||
"transmit_command_futured",
|
||||
"transmit_command",
|
||||
"get_waiting_command",
|
||||
"make_command",
|
||||
"transmit_downward",
|
||||
"host_recv_adv_packet",
|
||||
"copy_extra_byte_in_db",
|
||||
"parse_read_local_supported_commands_response",
|
||||
],
|
||||
"crypto_math": [
|
||||
"ecp_",
|
||||
"bignum_",
|
||||
"mpi_",
|
||||
"sswu",
|
||||
"modp",
|
||||
"dragonfly_",
|
||||
"gcm_mult",
|
||||
"__multiply",
|
||||
"quorem",
|
||||
"__mdiff",
|
||||
"__lshift",
|
||||
"__mprec_tens",
|
||||
"ECC_",
|
||||
"multiprecision_",
|
||||
"mix_sub_columns",
|
||||
"sbox",
|
||||
"gfm2_sbox",
|
||||
"gfm3_sbox",
|
||||
"curve_p256",
|
||||
"curve",
|
||||
"p_256_init_curve",
|
||||
"shift_sub_rows",
|
||||
"rshift",
|
||||
],
|
||||
"hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"],
|
||||
"libc": [
|
||||
"printf",
|
||||
"scanf",
|
||||
"malloc",
|
||||
"free",
|
||||
"memcpy",
|
||||
"memset",
|
||||
"strcpy",
|
||||
"strlen",
|
||||
"_dtoa",
|
||||
"_fopen",
|
||||
"__sfvwrite_r",
|
||||
"qsort",
|
||||
"__sf",
|
||||
"__sflush_r",
|
||||
"__srefill_r",
|
||||
"_impure_data",
|
||||
"_reclaim_reent",
|
||||
"_open_r",
|
||||
"strncpy",
|
||||
"_strtod_l",
|
||||
"__gethex",
|
||||
"__hexnan",
|
||||
"_setenv_r",
|
||||
"_tzset_unlocked_r",
|
||||
"__tzcalc_limits",
|
||||
"select",
|
||||
"scalbnf",
|
||||
"strtof",
|
||||
"strtof_l",
|
||||
"__d2b",
|
||||
"__b2d",
|
||||
"__s2b",
|
||||
"_Balloc",
|
||||
"__multadd",
|
||||
"__lo0bits",
|
||||
"__atexit0",
|
||||
"__smakebuf_r",
|
||||
"__swhatbuf_r",
|
||||
"_sungetc_r",
|
||||
"_close_r",
|
||||
"_link_r",
|
||||
"_unsetenv_r",
|
||||
"_rename_r",
|
||||
"__month_lengths",
|
||||
"tzinfo",
|
||||
"__ratio",
|
||||
"__hi0bits",
|
||||
"__ulp",
|
||||
"__any_on",
|
||||
"__copybits",
|
||||
"L_shift",
|
||||
"_fcntl_r",
|
||||
"_lseek_r",
|
||||
"_read_r",
|
||||
"_write_r",
|
||||
"_unlink_r",
|
||||
"_fstat_r",
|
||||
"access",
|
||||
"fsync",
|
||||
"tcsetattr",
|
||||
"tcgetattr",
|
||||
"tcflush",
|
||||
"tcdrain",
|
||||
"__ssrefill_r",
|
||||
"_stat_r",
|
||||
"__hexdig_fun",
|
||||
"__mcmp",
|
||||
"_fwalk_sglue",
|
||||
"__fpclassifyf",
|
||||
"_setlocale_r",
|
||||
"_mbrtowc_r",
|
||||
"fcntl",
|
||||
"__match",
|
||||
"_lock_close",
|
||||
"__c$",
|
||||
"__func__$",
|
||||
"__FUNCTION__$",
|
||||
"DAYS_IN_MONTH",
|
||||
"_DAYS_BEFORE_MONTH",
|
||||
"CSWTCH$",
|
||||
"dst$",
|
||||
"sulp",
|
||||
],
|
||||
"string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"],
|
||||
"memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"],
|
||||
"file_io": [
|
||||
"fread",
|
||||
"fwrite",
|
||||
"fopen",
|
||||
"fclose",
|
||||
"fseek",
|
||||
"ftell",
|
||||
"fflush",
|
||||
"s_fd_table",
|
||||
],
|
||||
"string_formatting": [
|
||||
"snprintf",
|
||||
"vsnprintf",
|
||||
"sprintf",
|
||||
"vsprintf",
|
||||
"sscanf",
|
||||
"vsscanf",
|
||||
],
|
||||
"cpp_anonymous": ["_GLOBAL__N_", "n$"],
|
||||
"cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"],
|
||||
"exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"],
|
||||
"static_init": ["_GLOBAL__sub_I_"],
|
||||
"mdns_lib": ["mdns"],
|
||||
"phy_radio": [
|
||||
"phy_",
|
||||
"rf_",
|
||||
"chip_",
|
||||
"register_chipv7",
|
||||
"pbus_",
|
||||
"bb_",
|
||||
"fe_",
|
||||
"rfcal_",
|
||||
"ram_rfcal",
|
||||
"tx_pwctrl",
|
||||
"rx_chan",
|
||||
"set_rx_gain",
|
||||
"set_chan",
|
||||
"agc_reg",
|
||||
"ram_txiq",
|
||||
"ram_txdc",
|
||||
"ram_gen_rx_gain",
|
||||
"rx_11b_opt",
|
||||
"set_rx_sense",
|
||||
"set_rx_gain_cal",
|
||||
"set_chan_dig_gain",
|
||||
"tx_pwctrl_init_cal",
|
||||
"rfcal_txiq",
|
||||
"set_tx_gain_table",
|
||||
"correct_rfpll_offset",
|
||||
"pll_correct_dcap",
|
||||
"txiq_cal_init",
|
||||
"pwdet_sar",
|
||||
"pwdet_sar2_init",
|
||||
"ram_iq_est_enable",
|
||||
"ram_rfpll_set_freq",
|
||||
"ant_wifirx_cfg",
|
||||
"ant_btrx_cfg",
|
||||
"force_txrxoff",
|
||||
"force_txrx_off",
|
||||
"tx_paon_set",
|
||||
"opt_11b_resart",
|
||||
"rfpll_1p2_opt",
|
||||
"ram_dc_iq_est",
|
||||
"ram_start_tx_tone",
|
||||
"ram_en_pwdet",
|
||||
"ram_cbw2040_cfg",
|
||||
"rxdc_est_min",
|
||||
"i2cmst_reg_init",
|
||||
"temprature_sens_read",
|
||||
"ram_restart_cal",
|
||||
"ram_write_gain_mem",
|
||||
"ram_wait_rfpll_cal_end",
|
||||
"txcal_debuge_mode",
|
||||
"ant_wifitx_cfg",
|
||||
"reg_init_begin",
|
||||
],
|
||||
"wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"],
|
||||
"wifi_lmac": ["lmac"],
|
||||
"wifi_device": ["wdev", "wDev_"],
|
||||
"power_mgmt": [
|
||||
"pm_",
|
||||
"sleep",
|
||||
"rtc_sleep",
|
||||
"light_sleep",
|
||||
"deep_sleep",
|
||||
"power_down",
|
||||
"g_pm",
|
||||
],
|
||||
"memory_mgmt": [
|
||||
"mem_",
|
||||
"memory_",
|
||||
"tlsf_",
|
||||
"memp_",
|
||||
"pbuf_",
|
||||
"pbuf_alloc",
|
||||
"pbuf_copy_partial_pbuf",
|
||||
],
|
||||
"hal_layer": ["hal_"],
|
||||
"clock_mgmt": [
|
||||
"clk_",
|
||||
"clock_",
|
||||
"rtc_clk",
|
||||
"apb_",
|
||||
"cpu_freq",
|
||||
"setCpuFrequencyMhz",
|
||||
],
|
||||
"cache_mgmt": ["cache"],
|
||||
"flash_ops": ["flash", "image_load"],
|
||||
"interrupt_handlers": [
|
||||
"isr",
|
||||
"interrupt",
|
||||
"intr_",
|
||||
"exc_",
|
||||
"exception",
|
||||
"port_IntStack",
|
||||
],
|
||||
"wrapper_functions": ["_wrapper"],
|
||||
"error_handling": ["panic", "abort", "assert", "error_", "fault"],
|
||||
"authentication": ["auth"],
|
||||
"ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"],
|
||||
"dhcp": ["dhcp", "handle_dhcp"],
|
||||
"ethernet_phy": [
|
||||
"emac_",
|
||||
"eth_phy_",
|
||||
"phy_tlk110",
|
||||
"phy_lan87",
|
||||
"phy_ip101",
|
||||
"phy_rtl",
|
||||
"phy_dp83",
|
||||
"phy_ksz",
|
||||
"lan87xx_",
|
||||
"rtl8201_",
|
||||
"ip101_",
|
||||
"ksz80xx_",
|
||||
"jl1101_",
|
||||
"dp83848_",
|
||||
"eth_on_state_changed",
|
||||
],
|
||||
"threading": ["pthread_", "thread_", "_task_"],
|
||||
"pthread": ["pthread"],
|
||||
"synchronization": ["mutex", "semaphore", "spinlock", "portMUX"],
|
||||
"math_lib": [
|
||||
"sin",
|
||||
"cos",
|
||||
"tan",
|
||||
"sqrt",
|
||||
"pow",
|
||||
"exp",
|
||||
"log",
|
||||
"atan",
|
||||
"asin",
|
||||
"acos",
|
||||
"floor",
|
||||
"ceil",
|
||||
"fabs",
|
||||
"round",
|
||||
],
|
||||
"random": ["rand", "random", "rng_", "prng"],
|
||||
"time_lib": [
|
||||
"time",
|
||||
"clock",
|
||||
"gettimeofday",
|
||||
"settimeofday",
|
||||
"localtime",
|
||||
"gmtime",
|
||||
"mktime",
|
||||
"strftime",
|
||||
],
|
||||
"console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"],
|
||||
"rom_functions": ["r_", "rom_"],
|
||||
"compiler_runtime": [
|
||||
"__divdi3",
|
||||
"__udivdi3",
|
||||
"__moddi3",
|
||||
"__muldi3",
|
||||
"__ashldi3",
|
||||
"__ashrdi3",
|
||||
"__lshrdi3",
|
||||
"__cmpdi2",
|
||||
"__fixdfdi",
|
||||
"__floatdidf",
|
||||
],
|
||||
"libgcc": ["libgcc", "_divdi3", "_udivdi3"],
|
||||
"boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"],
|
||||
"bootloader": ["bootloader_", "esp_bootloader"],
|
||||
"app_framework": ["app_", "initArduino", "setup", "loop", "Update"],
|
||||
"weak_symbols": ["__weak_"],
|
||||
"compiler_builtins": ["__builtin_"],
|
||||
"vfs": ["vfs_", "VFS"],
|
||||
"esp32_sdk": ["esp32_", "esp32c", "esp32s"],
|
||||
"usb": ["usb_", "USB", "cdc_", "CDC"],
|
||||
"i2c_driver": ["i2c_", "I2C"],
|
||||
"i2s_driver": ["i2s_", "I2S"],
|
||||
"spi_driver": ["spi_", "SPI"],
|
||||
"adc_driver": ["adc_", "ADC"],
|
||||
"dac_driver": ["dac_", "DAC"],
|
||||
"touch_driver": ["touch_", "TOUCH"],
|
||||
"pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"],
|
||||
"rmt_driver": ["rmt_", "RMT"],
|
||||
"pcnt_driver": ["pcnt_", "PCNT"],
|
||||
"can_driver": ["can_", "CAN", "twai_", "TWAI"],
|
||||
"sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"],
|
||||
"temp_sensor": ["temp_sensor", "tsens_"],
|
||||
"watchdog": ["wdt_", "WDT", "watchdog"],
|
||||
"brownout": ["brownout", "bod_"],
|
||||
"ulp": ["ulp_", "ULP"],
|
||||
"psram": ["psram", "PSRAM", "spiram", "SPIRAM"],
|
||||
"efuse": ["efuse", "EFUSE"],
|
||||
"partition": ["partition", "esp_partition"],
|
||||
"esp_event": ["esp_event", "event_loop", "event_callback"],
|
||||
"esp_console": ["esp_console", "console_"],
|
||||
"chip_specific": ["chip_", "esp_chip"],
|
||||
"esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"],
|
||||
"ipc": ["esp_ipc", "ipc_"],
|
||||
"wifi_config": [
|
||||
"g_cnxMgr",
|
||||
"gChmCxt",
|
||||
"g_ic",
|
||||
"TxRxCxt",
|
||||
"s_dp",
|
||||
"s_ni",
|
||||
"s_reg_dump",
|
||||
"packet$",
|
||||
"d_mult_table",
|
||||
"K",
|
||||
"fcstab",
|
||||
],
|
||||
"smartconfig": ["sc_ack_send"],
|
||||
"rc_calibration": ["rc_cal", "rcUpdate"],
|
||||
"noise_floor": ["noise_check"],
|
||||
"rf_calibration": [
|
||||
"set_rx_sense",
|
||||
"set_rx_gain_cal",
|
||||
"set_chan_dig_gain",
|
||||
"tx_pwctrl_init_cal",
|
||||
"rfcal_txiq",
|
||||
"set_tx_gain_table",
|
||||
"correct_rfpll_offset",
|
||||
"pll_correct_dcap",
|
||||
"txiq_cal_init",
|
||||
"pwdet_sar",
|
||||
"rx_11b_opt",
|
||||
],
|
||||
"wifi_crypto": [
|
||||
"pk_use_ecparams",
|
||||
"process_segments",
|
||||
"ccmp_",
|
||||
"rc4_",
|
||||
"aria_",
|
||||
"mgf_mask",
|
||||
"dh_group",
|
||||
"ccmp_aad_nonce",
|
||||
"ccmp_encrypt",
|
||||
"rc4_skip",
|
||||
"aria_sb1",
|
||||
"aria_sb2",
|
||||
"aria_is1",
|
||||
"aria_is2",
|
||||
"aria_sl",
|
||||
"aria_a",
|
||||
],
|
||||
"radio_control": ["fsm_input", "fsm_sconfreq"],
|
||||
"pbuf": [
|
||||
"pbuf_",
|
||||
],
|
||||
"event_group": ["xEventGroup"],
|
||||
"ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"],
|
||||
"provisioning": ["prov_", "prov_stop_and_notify"],
|
||||
"scan": ["gScanStruct"],
|
||||
"port": ["xPort"],
|
||||
"elf_loader": [
|
||||
"elf_add",
|
||||
"elf_add_note",
|
||||
"elf_add_segment",
|
||||
"process_image",
|
||||
"read_encoded",
|
||||
"read_encoded_value",
|
||||
"read_encoded_value_with_base",
|
||||
"process_image_header",
|
||||
],
|
||||
"socket_api": [
|
||||
"sockets",
|
||||
"netconn_",
|
||||
"accept_function",
|
||||
"recv_raw",
|
||||
"socket_ipv4_multicast",
|
||||
"socket_ipv6_multicast",
|
||||
],
|
||||
"igmp": ["igmp_", "igmp_send", "igmp_input"],
|
||||
"icmp6": ["icmp6_"],
|
||||
"arp": ["arp_table"],
|
||||
"ampdu": [
|
||||
"ampdu_",
|
||||
"rcAmpdu",
|
||||
"trc_onAmpduOp",
|
||||
"rcAmpduLowerRate",
|
||||
"ampdu_dispatch_upto",
|
||||
],
|
||||
"ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"],
|
||||
"rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"],
|
||||
"nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"],
|
||||
"channel_mgmt": ["chm_init", "chm_set_current_channel"],
|
||||
"trace": ["trc_init", "trc_onAmpduOp"],
|
||||
"country_code": ["country_info", "country_info_24ghz"],
|
||||
"multicore": ["do_multicore_settings"],
|
||||
"Update_lib": ["Update"],
|
||||
"stdio": [
|
||||
"__sf",
|
||||
"__sflush_r",
|
||||
"__srefill_r",
|
||||
"_impure_data",
|
||||
"_reclaim_reent",
|
||||
"_open_r",
|
||||
],
|
||||
"strncpy_ops": ["strncpy"],
|
||||
"math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"],
|
||||
"character_class": ["__chclass"],
|
||||
"camellia": ["camellia_", "camellia_feistel"],
|
||||
"crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"],
|
||||
"event_buffer": ["g_eb_list_desc", "eb_space"],
|
||||
"base_node": ["base_node_", "base_node_add_handler"],
|
||||
"file_descriptor": ["s_fd_table"],
|
||||
"tx_delay": ["tx_delay_cfg"],
|
||||
"deinit": ["deinit_functions"],
|
||||
"lcp_echo": ["LcpEchoCheck"],
|
||||
"raw_api": ["raw_bind", "raw_connect"],
|
||||
"checksum": ["process_checksum"],
|
||||
"entry_management": ["add_entry"],
|
||||
"esp_ota": ["esp_ota", "ota_", "read_otadata"],
|
||||
"http_server": [
|
||||
"httpd_",
|
||||
"parse_url_char",
|
||||
"cb_headers_complete",
|
||||
"delete_entry",
|
||||
"validate_structure",
|
||||
"config_save",
|
||||
"config_new",
|
||||
"verify_url",
|
||||
"cb_url",
|
||||
],
|
||||
"misc_system": [
|
||||
"alarm_cbs",
|
||||
"start_up",
|
||||
"tokens",
|
||||
"unhex",
|
||||
"osi_funcs_ro",
|
||||
"enum_function",
|
||||
"fragment_and_dispatch",
|
||||
"alarm_set",
|
||||
"osi_alarm_new",
|
||||
"config_set_string",
|
||||
"config_update_newest_section",
|
||||
"config_remove_key",
|
||||
"method_strings",
|
||||
"interop_match",
|
||||
"interop_database",
|
||||
"__state_table",
|
||||
"__action_table",
|
||||
"s_stub_table",
|
||||
"s_context",
|
||||
"s_mmu_ctx",
|
||||
"s_get_bus_mask",
|
||||
"hli_queue_put",
|
||||
"list_remove",
|
||||
"list_delete",
|
||||
"lock_acquire_generic",
|
||||
"is_vect_desc_usable",
|
||||
"io_mode_str",
|
||||
"__c$20233",
|
||||
"interface",
|
||||
"read_id_core",
|
||||
"subscribe_idle",
|
||||
"unsubscribe_idle",
|
||||
"s_clkout_handle",
|
||||
"lock_release_generic",
|
||||
"config_set_int",
|
||||
"config_get_int",
|
||||
"config_get_string",
|
||||
"config_has_key",
|
||||
"config_remove_section",
|
||||
"osi_alarm_init",
|
||||
"osi_alarm_deinit",
|
||||
"fixed_queue_enqueue",
|
||||
"fixed_queue_dequeue",
|
||||
"fixed_queue_new",
|
||||
"fixed_pkt_queue_enqueue",
|
||||
"fixed_pkt_queue_new",
|
||||
"list_append",
|
||||
"list_prepend",
|
||||
"list_insert_after",
|
||||
"list_contains",
|
||||
"list_get_node",
|
||||
"hash_function_blob",
|
||||
"cb_no_body",
|
||||
"cb_on_body",
|
||||
"profile_tab",
|
||||
"get_arg",
|
||||
"trim",
|
||||
"buf$",
|
||||
"process_appended_hash_and_sig$constprop$0",
|
||||
"uuidType",
|
||||
"allocate_svc_db_buf",
|
||||
"_hostname_is_ours",
|
||||
"s_hli_handlers",
|
||||
"tick_cb",
|
||||
"idle_cb",
|
||||
"input",
|
||||
"entry_find",
|
||||
"section_find",
|
||||
"find_bucket_entry_",
|
||||
"config_has_section",
|
||||
"hli_queue_create",
|
||||
"hli_queue_get",
|
||||
"hli_c_handler",
|
||||
"future_ready",
|
||||
"future_await",
|
||||
"future_new",
|
||||
"pkt_queue_enqueue",
|
||||
"pkt_queue_dequeue",
|
||||
"pkt_queue_cleanup",
|
||||
"pkt_queue_create",
|
||||
"pkt_queue_destroy",
|
||||
"fixed_pkt_queue_dequeue",
|
||||
"osi_alarm_cancel",
|
||||
"osi_alarm_is_active",
|
||||
"osi_sem_take",
|
||||
"osi_event_create",
|
||||
"osi_event_bind",
|
||||
"alarm_cb_handler",
|
||||
"list_foreach",
|
||||
"list_back",
|
||||
"list_front",
|
||||
"list_clear",
|
||||
"fixed_queue_try_peek_first",
|
||||
"translate_path",
|
||||
"get_idx",
|
||||
"find_key",
|
||||
"init",
|
||||
"end",
|
||||
"start",
|
||||
"set_read_value",
|
||||
"copy_address_list",
|
||||
"copy_and_key",
|
||||
"sdk_cfg_opts",
|
||||
"leftshift_onebit",
|
||||
"config_section_end",
|
||||
"config_section_begin",
|
||||
"find_entry_and_check_all_reset",
|
||||
"image_validate",
|
||||
"xPendingReadyList",
|
||||
"vListInitialise",
|
||||
"lock_init_generic",
|
||||
"ant_bttx_cfg",
|
||||
"ant_dft_cfg",
|
||||
"cs_send_to_ctrl_sock",
|
||||
"config_llc_util_funcs_reset",
|
||||
"make_set_adv_report_flow_control",
|
||||
"make_set_event_mask",
|
||||
"raw_new",
|
||||
"raw_remove",
|
||||
"BTE_InitStack",
|
||||
"parse_read_local_supported_features_response",
|
||||
"__math_invalidf",
|
||||
"tinytens",
|
||||
"__mprec_tinytens",
|
||||
"__mprec_bigtens",
|
||||
"vRingbufferDelete",
|
||||
"vRingbufferDeleteWithCaps",
|
||||
"vRingbufferReturnItem",
|
||||
"vRingbufferReturnItemFromISR",
|
||||
"get_acl_data_size_ble",
|
||||
"get_features_ble",
|
||||
"get_features_classic",
|
||||
"get_acl_packet_size_ble",
|
||||
"get_acl_packet_size_classic",
|
||||
"supports_extended_inquiry_response",
|
||||
"supports_rssi_with_inquiry_results",
|
||||
"supports_interlaced_inquiry_scan",
|
||||
"supports_reading_remote_extended_features",
|
||||
],
|
||||
"bluetooth_ll": [
|
||||
"lld_pdu_",
|
||||
"ld_acl_",
|
||||
"lld_stop_ind_handler",
|
||||
"lld_evt_winsize_change",
|
||||
"config_lld_evt_funcs_reset",
|
||||
"config_lld_funcs_reset",
|
||||
"config_llm_funcs_reset",
|
||||
"llm_set_long_adv_data",
|
||||
"lld_retry_tx_prog",
|
||||
"llc_link_sup_to_ind_handler",
|
||||
"config_llc_funcs_reset",
|
||||
"lld_evt_rxwin_compute",
|
||||
"config_btdm_funcs_reset",
|
||||
"config_ea_funcs_reset",
|
||||
"llc_defalut_state_tab_reset",
|
||||
"config_rwip_funcs_reset",
|
||||
"ke_lmp_rx_flooding_detect",
|
||||
],
|
||||
}
|
||||
|
||||
# Demangled patterns: patterns found in demangled C++ names
|
||||
DEMANGLED_PATTERNS = {
|
||||
"gpio_driver": ["GPIO"],
|
||||
"uart_driver": ["UART"],
|
||||
"network_stack": [
|
||||
"lwip",
|
||||
"tcp",
|
||||
"udp",
|
||||
"ip4",
|
||||
"ip6",
|
||||
"dhcp",
|
||||
"dns",
|
||||
"netif",
|
||||
"ethernet",
|
||||
"ppp",
|
||||
"slip",
|
||||
],
|
||||
"wifi_stack": ["NetworkInterface"],
|
||||
"nimble_bt": [
|
||||
"nimble",
|
||||
"NimBLE",
|
||||
"ble_hs",
|
||||
"ble_gap",
|
||||
"ble_gatt",
|
||||
"ble_att",
|
||||
"ble_l2cap",
|
||||
"ble_sm",
|
||||
],
|
||||
"crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"],
|
||||
"cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"],
|
||||
"static_init": ["__static_initialization"],
|
||||
"rtti": ["__type_info", "__class_type_info"],
|
||||
"web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"],
|
||||
"async_tcp": ["AsyncClient", "AsyncServer"],
|
||||
"mdns_lib": ["mdns"],
|
||||
"json_lib": [
|
||||
"ArduinoJson",
|
||||
"JsonDocument",
|
||||
"JsonArray",
|
||||
"JsonObject",
|
||||
"deserialize",
|
||||
"serialize",
|
||||
],
|
||||
"http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"],
|
||||
"logging": ["log", "Log", "print", "Print", "diag_"],
|
||||
"authentication": ["checkDigestAuthentication"],
|
||||
"libgcc": ["libgcc"],
|
||||
"esp_system": ["esp_", "ESP"],
|
||||
"arduino": ["arduino"],
|
||||
"nvs": ["nvs_", "_ZTVN3nvs", "nvs::"],
|
||||
"filesystem": ["spiffs", "vfs"],
|
||||
"libc": ["newlib"],
|
||||
}
|
||||
|
||||
# Patterns for categorizing ESPHome core symbols into subcategories
|
||||
CORE_SUBCATEGORY_PATTERNS = {
|
||||
"Component Framework": ["Component"],
|
||||
"Application Core": ["Application"],
|
||||
"Scheduler": ["Scheduler"],
|
||||
"Component Iterator": ["ComponentIterator"],
|
||||
"Helper Functions": ["Helpers", "helpers"],
|
||||
"Preferences/Storage": ["Preferences", "ESPPreferences"],
|
||||
"I/O Utilities": ["HighFrequencyLoopRequester"],
|
||||
"String Utilities": ["str_"],
|
||||
"Bit Utilities": ["reverse_bits"],
|
||||
"Data Conversion": ["convert_"],
|
||||
"Network Utilities": ["network", "IPAddress"],
|
||||
"API Protocol": ["api::"],
|
||||
"WiFi Manager": ["wifi::"],
|
||||
"MQTT Client": ["mqtt::"],
|
||||
"Logger": ["logger::"],
|
||||
"OTA Updates": ["ota::"],
|
||||
"Web Server": ["web_server::"],
|
||||
"Time Management": ["time::"],
|
||||
"Sensor Framework": ["sensor::"],
|
||||
"Binary Sensor": ["binary_sensor::"],
|
||||
"Switch Framework": ["switch_::"],
|
||||
"Light Framework": ["light::"],
|
||||
"Climate Framework": ["climate::"],
|
||||
"Cover Framework": ["cover::"],
|
||||
}
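The tables above are consumed by plain substring matching, raw symbol names first and demangled names second; a simplified mirror of that fallback (editorial sketch, the sample symbol is illustrative):

from esphome.analyze_memory.const import DEMANGLED_PATTERNS, SYMBOL_PATTERNS

def classify_fallback(raw: str, demangled: str) -> str:
    # Mirrors the non-esphome fallback order used in MemoryAnalyzer._identify_component.
    for component, patterns in SYMBOL_PATTERNS.items():
        if any(pattern in raw for pattern in patterns):
            return component
    for component, patterns in DEMANGLED_PATTERNS.items():
        if any(pattern in demangled for pattern in patterns):
            return component
    return "other"

print(classify_fallback("vTaskDelay", "vTaskDelay"))  # -> "freertos" (matches the "vTask" pattern)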
|
||||
121
esphome/analyze_memory/helpers.py
Normal file
@@ -0,0 +1,121 @@
|
||||
"""Helper functions for memory analysis."""
|
||||
|
||||
from functools import cache
|
||||
from pathlib import Path
|
||||
|
||||
from .const import SECTION_MAPPING
|
||||
|
||||
# Import namespace constant from parent module
|
||||
# Note: This would create a circular import if done at module level,
|
||||
# so we'll define it locally here as well
|
||||
_NAMESPACE_ESPHOME = "esphome::"
|
||||
|
||||
|
||||
# Get the list of actual ESPHome components by scanning the components directory
|
||||
@cache
|
||||
def get_esphome_components():
|
||||
"""Get set of actual ESPHome components from the components directory."""
|
||||
# Find the components directory relative to this file
|
||||
# Go up two levels from analyze_memory/helpers.py to esphome/
|
||||
current_dir = Path(__file__).parent.parent
|
||||
components_dir = current_dir / "components"
|
||||
|
||||
if not components_dir.exists() or not components_dir.is_dir():
|
||||
return frozenset()
|
||||
|
||||
return frozenset(
|
||||
item.name
|
||||
for item in components_dir.iterdir()
|
||||
if item.is_dir()
|
||||
and not item.name.startswith(".")
|
||||
and not item.name.startswith("__")
|
||||
)
|
||||
|
||||
|
||||
@cache
|
||||
def get_component_class_patterns(component_name: str) -> list[str]:
|
||||
"""Generate component class name patterns for symbol matching.
|
||||
|
||||
Args:
|
||||
component_name: The component name (e.g., "ota", "wifi", "api")
|
||||
|
||||
Returns:
|
||||
List of pattern strings to match against demangled symbols
|
||||
"""
|
||||
component_upper = component_name.upper()
|
||||
component_camel = component_name.replace("_", "").title()
|
||||
return [
|
||||
f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent
|
||||
f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent
|
||||
f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent
|
||||
f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent
|
||||
]
|
||||
|
||||
|
||||
def map_section_name(raw_section: str) -> str | None:
|
||||
"""Map raw section name to standard section.
|
||||
|
||||
Args:
|
||||
raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1")
|
||||
|
||||
Returns:
|
||||
Standard section name (".text", ".rodata", ".data", ".bss") or None
|
||||
"""
|
||||
for standard_section, patterns in SECTION_MAPPING.items():
|
||||
if any(pattern in raw_section for pattern in patterns):
|
||||
return standard_section
|
||||
return None
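# Editorial note (not part of the change): expected behaviour on a few illustrative
# raw section names:
#   map_section_name(".iram0.text")    -> ".text"    (matches the ".iram"/".text" variants)
#   map_section_name(".rodata.str1.1") -> ".rodata"
#   map_section_name(".xt.prop")       -> None       (unmapped sections are ignored)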
|
||||
|
||||
|
||||
def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None:
|
||||
"""Parse a single symbol line from objdump output.
|
||||
|
||||
Args:
|
||||
line: Line from objdump -t output
|
||||
|
||||
Returns:
|
||||
Tuple of (mapped section, name, size, address), or None if the line is not a
function/object symbol with a non-zero size in a mapped section.
|
||||
Format: address l/g w/d F/O section size name
|
||||
Example: 40084870 l F .iram0.text 00000000 _xt_user_exc
|
||||
"""
|
||||
parts = line.split()
|
||||
if len(parts) < 5:
|
||||
return None
|
||||
|
||||
try:
|
||||
# Validate and extract address
|
||||
address = parts[0]
|
||||
int(address, 16)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
# Look for F (function) or O (object) flag
|
||||
if "F" not in parts and "O" not in parts:
|
||||
return None
|
||||
|
||||
# Find section, size, and name
|
||||
for i, part in enumerate(parts):
|
||||
if not part.startswith("."):
|
||||
continue
|
||||
|
||||
section = map_section_name(part)
|
||||
if not section:
|
||||
break
|
||||
|
||||
# Need at least size field after section
|
||||
if i + 1 >= len(parts):
|
||||
break
|
||||
|
||||
try:
|
||||
size = int(parts[i + 1], 16)
|
||||
except ValueError:
|
||||
break
|
||||
|
||||
# Need symbol name and non-zero size
|
||||
if i + 2 >= len(parts) or size == 0:
|
||||
break
|
||||
|
||||
name = " ".join(parts[i + 2 :])
|
||||
return (section, name, size, address)
|
||||
|
||||
return None
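# Editorial note (not part of the change): with a made-up non-zero size field, e.g.
#   "40084870 l     F .iram0.text 0000001c _xt_user_exc"
# parse_symbol_line would return (".text", "_xt_user_exc", 28, "40084870"); the
# zero-size line quoted in the docstring above would return None, since zero-size
# symbols are skipped.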
|
||||
@@ -15,7 +15,10 @@ from esphome.const import (
|
||||
CONF_TYPE_ID,
|
||||
CONF_UPDATE_INTERVAL,
|
||||
)
|
||||
from esphome.core import ID
|
||||
from esphome.cpp_generator import MockObj, MockObjClass, TemplateArgsType
|
||||
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
|
||||
from esphome.types import ConfigType
|
||||
from esphome.util import Registry
|
||||
|
||||
|
||||
@@ -49,11 +52,11 @@ def maybe_conf(conf, *validators):
|
||||
return validate
|
||||
|
||||
|
||||
def register_action(name, action_type, schema):
|
||||
def register_action(name: str, action_type: MockObjClass, schema: cv.Schema):
|
||||
return ACTION_REGISTRY.register(name, action_type, schema)
|
||||
|
||||
|
||||
def register_condition(name, condition_type, schema):
|
||||
def register_condition(name: str, condition_type: MockObjClass, schema: cv.Schema):
|
||||
return CONDITION_REGISTRY.register(name, condition_type, schema)
|
||||
|
||||
|
||||
@@ -164,43 +167,78 @@ XorCondition = cg.esphome_ns.class_("XorCondition", Condition)
|
||||
|
||||
|
||||
@register_condition("and", AndCondition, validate_condition_list)
|
||||
async def and_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def and_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
conditions = await build_condition_list(config, template_arg, args)
|
||||
return cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition("or", OrCondition, validate_condition_list)
|
||||
async def or_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def or_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
conditions = await build_condition_list(config, template_arg, args)
|
||||
return cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition("all", AndCondition, validate_condition_list)
|
||||
async def all_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def all_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
conditions = await build_condition_list(config, template_arg, args)
|
||||
return cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition("any", OrCondition, validate_condition_list)
|
||||
async def any_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def any_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
conditions = await build_condition_list(config, template_arg, args)
|
||||
return cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition("not", NotCondition, validate_potentially_and_condition)
|
||||
async def not_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def not_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
condition = await build_condition(config, template_arg, args)
|
||||
return cg.new_Pvariable(condition_id, template_arg, condition)
|
||||
|
||||
|
||||
@register_condition("xor", XorCondition, validate_condition_list)
|
||||
async def xor_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def xor_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
conditions = await build_condition_list(config, template_arg, args)
|
||||
return cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition("lambda", LambdaCondition, cv.returning_lambda)
|
||||
async def lambda_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def lambda_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
lambda_ = await cg.process_lambda(config, args, return_type=bool)
|
||||
return cg.new_Pvariable(condition_id, template_arg, lambda_)
|
||||
|
||||
@@ -217,7 +255,12 @@ async def lambda_condition_to_code(config, condition_id, template_arg, args):
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
)
|
||||
async def for_condition_to_code(config, condition_id, template_arg, args):
|
||||
async def for_condition_to_code(
|
||||
config: ConfigType,
|
||||
condition_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
condition = await build_condition(
|
||||
config[CONF_CONDITION], cg.TemplateArguments(), []
|
||||
)
|
||||
@@ -231,7 +274,12 @@ async def for_condition_to_code(config, condition_id, template_arg, args):
|
||||
@register_action(
|
||||
"delay", DelayAction, cv.templatable(cv.positive_time_period_milliseconds)
|
||||
)
|
||||
async def delay_action_to_code(config, action_id, template_arg, args):
|
||||
async def delay_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
await cg.register_component(var, {})
|
||||
template_ = await cg.templatable(config, args, cg.uint32)
|
||||
@@ -256,10 +304,15 @@ async def delay_action_to_code(config, action_id, template_arg, args):
|
||||
cv.has_at_least_one_key(CONF_CONDITION, CONF_ANY, CONF_ALL),
|
||||
),
|
||||
)
|
||||
async def if_action_to_code(config, action_id, template_arg, args):
|
||||
async def if_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
cond_conf = next(el for el in config if el in (CONF_ANY, CONF_ALL, CONF_CONDITION))
|
||||
conditions = await build_condition(config[cond_conf], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
||||
condition = await build_condition(config[cond_conf], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, condition)
|
||||
if CONF_THEN in config:
|
||||
actions = await build_action_list(config[CONF_THEN], template_arg, args)
|
||||
cg.add(var.add_then(actions))
|
||||
@@ -279,9 +332,14 @@ async def if_action_to_code(config, action_id, template_arg, args):
|
||||
}
|
||||
),
|
||||
)
|
||||
async def while_action_to_code(config, action_id, template_arg, args):
|
||||
conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
||||
async def while_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
condition = await build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, condition)
|
||||
actions = await build_action_list(config[CONF_THEN], template_arg, args)
|
||||
cg.add(var.add_then(actions))
|
||||
return var
|
||||
@@ -297,7 +355,12 @@ async def while_action_to_code(config, action_id, template_arg, args):
|
||||
}
|
||||
),
|
||||
)
|
||||
async def repeat_action_to_code(config, action_id, template_arg, args):
|
||||
async def repeat_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
count_template = await cg.templatable(config[CONF_COUNT], args, cg.uint32)
|
||||
cg.add(var.set_count(count_template))
|
||||
@@ -320,9 +383,14 @@ _validate_wait_until = cv.maybe_simple_value(
|
||||
|
||||
|
||||
@register_action("wait_until", WaitUntilAction, _validate_wait_until)
|
||||
async def wait_until_action_to_code(config, action_id, template_arg, args):
|
||||
conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
||||
async def wait_until_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
condition = await build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, condition)
|
||||
if CONF_TIMEOUT in config:
|
||||
template_ = await cg.templatable(config[CONF_TIMEOUT], args, cg.uint32)
|
||||
cg.add(var.set_timeout_value(template_))
|
||||
@@ -331,7 +399,12 @@ async def wait_until_action_to_code(config, action_id, template_arg, args):
|
||||
|
||||
|
||||
@register_action("lambda", LambdaAction, cv.lambda_)
|
||||
async def lambda_action_to_code(config, action_id, template_arg, args):
|
||||
async def lambda_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
lambda_ = await cg.process_lambda(config, args, return_type=cg.void)
|
||||
return cg.new_Pvariable(action_id, template_arg, lambda_)
|
||||
|
||||
@@ -345,7 +418,12 @@ async def lambda_action_to_code(config, action_id, template_arg, args):
|
||||
}
|
||||
),
|
||||
)
|
||||
async def component_update_action_to_code(config, action_id, template_arg, args):
|
||||
async def component_update_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
comp = await cg.get_variable(config[CONF_ID])
|
||||
return cg.new_Pvariable(action_id, template_arg, comp)
|
||||
|
||||
@@ -359,7 +437,12 @@ async def component_update_action_to_code(config, action_id, template_arg, args)
|
||||
}
|
||||
),
|
||||
)
|
||||
async def component_suspend_action_to_code(config, action_id, template_arg, args):
|
||||
async def component_suspend_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
comp = await cg.get_variable(config[CONF_ID])
|
||||
return cg.new_Pvariable(action_id, template_arg, comp)
|
||||
|
||||
@@ -376,7 +459,12 @@ async def component_suspend_action_to_code(config, action_id, template_arg, args
|
||||
}
|
||||
),
|
||||
)
|
||||
async def component_resume_action_to_code(config, action_id, template_arg, args):
|
||||
async def component_resume_action_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
) -> MockObj:
|
||||
comp = await cg.get_variable(config[CONF_ID])
|
||||
var = cg.new_Pvariable(action_id, template_arg, comp)
|
||||
if CONF_UPDATE_INTERVAL in config:
|
||||
@@ -385,7 +473,9 @@ async def component_resume_action_to_code(config, action_id, template_arg, args)
|
||||
return var
|
||||
|
||||
|
||||
async def build_action(full_config, template_arg, args):
|
||||
async def build_action(
|
||||
full_config: ConfigType, template_arg: cg.TemplateArguments, args: TemplateArgsType
|
||||
) -> MockObj:
|
||||
registry_entry, config = cg.extract_registry_entry_config(
|
||||
ACTION_REGISTRY, full_config
|
||||
)
|
||||
@@ -394,15 +484,19 @@ async def build_action(full_config, template_arg, args):
|
||||
return await builder(config, action_id, template_arg, args)
|
||||
|
||||
|
||||
async def build_action_list(config, templ, arg_type):
|
||||
actions = []
|
||||
async def build_action_list(
|
||||
config: list[ConfigType], templ: cg.TemplateArguments, arg_type: TemplateArgsType
|
||||
) -> list[MockObj]:
|
||||
actions: list[MockObj] = []
|
||||
for conf in config:
|
||||
action = await build_action(conf, templ, arg_type)
|
||||
actions.append(action)
|
||||
return actions
|
||||
|
||||
|
||||
async def build_condition(full_config, template_arg, args):
|
||||
async def build_condition(
|
||||
full_config: ConfigType, template_arg: cg.TemplateArguments, args: TemplateArgsType
|
||||
) -> MockObj:
|
||||
registry_entry, config = cg.extract_registry_entry_config(
|
||||
CONDITION_REGISTRY, full_config
|
||||
)
|
||||
@@ -411,15 +505,19 @@ async def build_condition(full_config, template_arg, args):
|
||||
return await builder(config, action_id, template_arg, args)
|
||||
|
||||
|
||||
async def build_condition_list(config, templ, args):
|
||||
conditions = []
|
||||
async def build_condition_list(
|
||||
config: ConfigType, templ: cg.TemplateArguments, args: TemplateArgsType
|
||||
) -> list[MockObj]:
|
||||
conditions: list[MockObj] = []
|
||||
for conf in config:
|
||||
condition = await build_condition(conf, templ, args)
|
||||
conditions.append(condition)
|
||||
return conditions
|
||||
|
||||
|
||||
async def build_automation(trigger, args, config):
|
||||
async def build_automation(
|
||||
trigger: MockObj, args: TemplateArgsType, config: ConfigType
|
||||
) -> MockObj:
|
||||
arg_types = [arg[0] for arg in args]
|
||||
templ = cg.TemplateArguments(*arg_types)
|
||||
obj = cg.new_Pvariable(config[CONF_AUTOMATION_ID], templ, trigger)
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import os
|
||||
|
||||
from esphome.const import __version__
|
||||
from esphome.core import CORE
|
||||
from esphome.helpers import mkdir_p, read_file, write_file_if_changed
|
||||
@@ -63,7 +61,7 @@ def write_ini(content):
|
||||
update_storage_json()
|
||||
path = CORE.relative_build_path("platformio.ini")
|
||||
|
||||
if os.path.isfile(path):
|
||||
if path.is_file():
|
||||
text = read_file(path)
|
||||
content_format = find_begin_end(
|
||||
text, INI_AUTO_GENERATE_BEGIN, INI_AUTO_GENERATE_END
|
||||
|
||||
@@ -12,6 +12,7 @@ from esphome.cpp_generator import ( # noqa: F401
|
||||
ArrayInitializer,
|
||||
Expression,
|
||||
LineComment,
|
||||
LogStringLiteral,
|
||||
MockObj,
|
||||
MockObjClass,
|
||||
Pvariable,
|
||||
|
||||
@@ -26,12 +26,12 @@ uint32_t Animation::get_animation_frame_count() const { return this->animation_f
|
||||
int Animation::get_current_frame() const { return this->current_frame_; }
|
||||
void Animation::next_frame() {
|
||||
this->current_frame_++;
|
||||
if (loop_count_ && this->current_frame_ == loop_end_frame_ &&
|
||||
if (loop_count_ && static_cast<uint32_t>(this->current_frame_) == loop_end_frame_ &&
|
||||
(this->loop_current_iteration_ < loop_count_ || loop_count_ < 0)) {
|
||||
this->current_frame_ = loop_start_frame_;
|
||||
this->loop_current_iteration_++;
|
||||
}
|
||||
if (this->current_frame_ >= animation_frame_count_) {
|
||||
if (static_cast<uint32_t>(this->current_frame_) >= animation_frame_count_) {
|
||||
this->loop_current_iteration_ = 1;
|
||||
this->current_frame_ = 0;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import base64
|
||||
import logging
|
||||
|
||||
from esphome import automation
|
||||
from esphome.automation import Condition
|
||||
@@ -8,34 +9,59 @@ import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_CAPTURE_RESPONSE,
|
||||
CONF_DATA,
|
||||
CONF_DATA_TEMPLATE,
|
||||
CONF_EVENT,
|
||||
CONF_ID,
|
||||
CONF_KEY,
|
||||
CONF_MAX_CONNECTIONS,
|
||||
CONF_ON_CLIENT_CONNECTED,
|
||||
CONF_ON_CLIENT_DISCONNECTED,
|
||||
CONF_ON_ERROR,
|
||||
CONF_ON_SUCCESS,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_REBOOT_TIMEOUT,
|
||||
CONF_RESPONSE_TEMPLATE,
|
||||
CONF_SERVICE,
|
||||
CONF_SERVICES,
|
||||
CONF_TAG,
|
||||
CONF_TRIGGER_ID,
|
||||
CONF_VARIABLES,
|
||||
)
|
||||
from esphome.core import CORE, CoroPriority, coroutine_with_priority
|
||||
from esphome.core import CORE, ID, CoroPriority, coroutine_with_priority
|
||||
from esphome.cpp_generator import TemplateArgsType
|
||||
from esphome.types import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "api"
|
||||
DEPENDENCIES = ["network"]
|
||||
AUTO_LOAD = ["socket"]
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
|
||||
|
||||
def AUTO_LOAD(config: ConfigType) -> list[str]:
    """Conditionally auto-load json only when capture_response is used."""
    base = ["socket"]

    # Check if any homeassistant.action/homeassistant.service has capture_response: true
    # This flag is set during config validation in _validate_response_config
    if not config or CORE.data.get(DOMAIN, {}).get(CONF_CAPTURE_RESPONSE, False):
        return base + ["json"]

    return base
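
_validate_response_config (further down in this file) records a flag in CORE.data when any homeassistant.action uses capture_response: true, and AUTO_LOAD above reads that flag to decide whether the json component is needed. A condensed sketch of the interaction (the config dict is a stand-in, not a real validated configuration):

CORE.data.setdefault(DOMAIN, {})[CONF_CAPTURE_RESPONSE] = True
assert AUTO_LOAD({"port": 6053}) == ["socket", "json"]  # json pulled in for response parsing

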
api_ns = cg.esphome_ns.namespace("api")
|
||||
APIServer = api_ns.class_("APIServer", cg.Component, cg.Controller)
|
||||
HomeAssistantServiceCallAction = api_ns.class_(
|
||||
"HomeAssistantServiceCallAction", automation.Action
|
||||
)
|
||||
ActionResponse = api_ns.class_("ActionResponse")
|
||||
HomeAssistantActionResponseTrigger = api_ns.class_(
|
||||
"HomeAssistantActionResponseTrigger", automation.Trigger
|
||||
)
|
||||
APIConnectedCondition = api_ns.class_("APIConnectedCondition", Condition)
|
||||
|
||||
UserServiceTrigger = api_ns.class_("UserServiceTrigger", automation.Trigger)
|
||||
@@ -55,6 +81,8 @@ CONF_BATCH_DELAY = "batch_delay"
|
||||
CONF_CUSTOM_SERVICES = "custom_services"
|
||||
CONF_HOMEASSISTANT_SERVICES = "homeassistant_services"
|
||||
CONF_HOMEASSISTANT_STATES = "homeassistant_states"
|
||||
CONF_LISTEN_BACKLOG = "listen_backlog"
|
||||
CONF_MAX_SEND_QUEUE = "max_send_queue"
|
||||
|
||||
|
||||
def validate_encryption_key(value):
|
||||
@@ -101,6 +129,32 @@ def _encryption_schema(config):
|
||||
return ENCRYPTION_SCHEMA(config)
|
||||
|
||||
|
||||
def _validate_api_config(config: ConfigType) -> ConfigType:
    """Validate API configuration with mutual exclusivity check and deprecation warning."""
    # Check if both password and encryption are configured
    has_password = CONF_PASSWORD in config and config[CONF_PASSWORD]
    has_encryption = CONF_ENCRYPTION in config

    if has_password and has_encryption:
        raise cv.Invalid(
            "The 'password' and 'encryption' options are mutually exclusive. "
            "The API client only supports one authentication method at a time. "
            "Please remove one of them. "
            "Note: 'password' authentication is deprecated and will be removed in version 2026.1.0. "
            "We strongly recommend using 'encryption' instead for better security."
        )

    # Warn about password deprecation
    if has_password:
        _LOGGER.warning(
            "API 'password' authentication has been deprecated since May 2022 and will be removed in version 2026.1.0. "
            "Please migrate to the 'encryption' configuration. "
            "See https://esphome.io/components/api.html#configuration-variables"
        )

    return config
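
A brief illustration of the rule enforced above (a sketch; keys are shown as plain strings for readability, while the real schema uses the esphome.const identifiers):

_validate_api_config({"password": "mypassword"})  # accepted, but logs the deprecation warning
_validate_api_config({"encryption": {"key": "..."}})  # accepted
_validate_api_config({"password": "mypassword", "encryption": {}})  # raises cv.Invalid: mutually exclusive

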
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -128,9 +182,46 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_ON_CLIENT_DISCONNECTED): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
# Connection limits to prevent memory exhaustion on resource-constrained devices
|
||||
# Each connection uses ~500-1000 bytes of RAM plus system resources
|
||||
# Platform defaults based on available RAM and network stack implementation:
|
||||
cv.SplitDefault(
|
||||
CONF_LISTEN_BACKLOG,
|
||||
esp8266=1, # Limited RAM (~40KB free), LWIP raw sockets
|
||||
esp32=4, # More RAM (520KB), BSD sockets
|
||||
rp2040=1, # Limited RAM (264KB), LWIP raw sockets like ESP8266
|
||||
bk72xx=4, # Moderate RAM, BSD-style sockets
|
||||
rtl87xx=4, # Moderate RAM, BSD-style sockets
|
||||
host=4, # Abundant resources
|
||||
ln882x=4, # Moderate RAM
|
||||
): cv.int_range(min=1, max=10),
|
||||
cv.SplitDefault(
|
||||
CONF_MAX_CONNECTIONS,
|
||||
esp8266=4, # ~40KB free RAM, each connection uses ~500-1000 bytes
|
||||
esp32=8, # 520KB RAM available
|
||||
rp2040=4, # 264KB RAM but LWIP constraints
|
||||
bk72xx=8, # Moderate RAM
|
||||
rtl87xx=8, # Moderate RAM
|
||||
host=8, # Abundant resources
|
||||
ln882x=8, # Moderate RAM
|
||||
): cv.int_range(min=1, max=20),
|
||||
# Maximum queued send buffers per connection before dropping connection
|
||||
# Each buffer uses ~8-12 bytes overhead plus actual message size
|
||||
# Platform defaults based on available RAM and typical message rates:
|
||||
cv.SplitDefault(
|
||||
CONF_MAX_SEND_QUEUE,
|
||||
esp8266=5, # Limited RAM, need to fail fast
|
||||
esp32=8, # More RAM, can buffer more
|
||||
rp2040=5, # Limited RAM
|
||||
bk72xx=8, # Moderate RAM
|
||||
rtl87xx=8, # Moderate RAM
|
||||
host=16, # Abundant resources
|
||||
ln882x=8, # Moderate RAM
|
||||
): cv.int_range(min=1, max=64),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
cv.rename_key(CONF_SERVICES, CONF_ACTIONS),
|
||||
_validate_api_config,
|
||||
)
|
||||
|
||||
|
||||
@@ -145,6 +236,11 @@ async def to_code(config):
|
||||
cg.add(var.set_password(config[CONF_PASSWORD]))
|
||||
cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
|
||||
cg.add(var.set_batch_delay(config[CONF_BATCH_DELAY]))
|
||||
if CONF_LISTEN_BACKLOG in config:
|
||||
cg.add(var.set_listen_backlog(config[CONF_LISTEN_BACKLOG]))
|
||||
if CONF_MAX_CONNECTIONS in config:
|
||||
cg.add(var.set_max_connections(config[CONF_MAX_CONNECTIONS]))
|
||||
cg.add_define("API_MAX_SEND_QUEUE", config[CONF_MAX_SEND_QUEUE])
|
||||
|
||||
# Set USE_API_SERVICES if any services are enabled
|
||||
if config.get(CONF_ACTIONS) or config[CONF_CUSTOM_SERVICES]:
|
||||
@@ -193,6 +289,7 @@ async def to_code(config):
|
||||
if key := encryption_config.get(CONF_KEY):
|
||||
decoded = base64.b64decode(key)
|
||||
cg.add(var.set_noise_psk(list(decoded)))
|
||||
cg.add_define("USE_API_NOISE_PSK_FROM_YAML")
|
||||
else:
|
||||
# No key provided, but encryption desired
|
||||
# This will allow a plaintext client to provide a noise key,
|
||||
@@ -212,6 +309,29 @@ async def to_code(config):
|
||||
KEY_VALUE_SCHEMA = cv.Schema({cv.string: cv.templatable(cv.string_strict)})
|
||||
|
||||
|
||||
def _validate_response_config(config: ConfigType) -> ConfigType:
    # Validate dependencies:
    # - response_template requires capture_response: true
    # - capture_response: true requires on_success
    if CONF_RESPONSE_TEMPLATE in config and not config[CONF_CAPTURE_RESPONSE]:
        raise cv.Invalid(
            f"`{CONF_RESPONSE_TEMPLATE}` requires `{CONF_CAPTURE_RESPONSE}: true` to be set.",
            path=[CONF_RESPONSE_TEMPLATE],
        )

    if config[CONF_CAPTURE_RESPONSE] and CONF_ON_SUCCESS not in config:
        raise cv.Invalid(
            f"`{CONF_CAPTURE_RESPONSE}: true` requires `{CONF_ON_SUCCESS}` to be set.",
            path=[CONF_CAPTURE_RESPONSE],
        )

    # Track if any action uses capture_response for AUTO_LOAD
    if config[CONF_CAPTURE_RESPONSE]:
        CORE.data.setdefault(DOMAIN, {})[CONF_CAPTURE_RESPONSE] = True

    return config
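
The dependencies above in practice (a sketch with action config keys shown as strings, not a validated schema object):

_validate_response_config({"capture_response": False})  # OK
_validate_response_config({"capture_response": True, "on_success": []})  # OK, also flags the json auto-load
_validate_response_config({"capture_response": True})  # raises: on_success is required
_validate_response_config({"response_template": "{{ x }}", "capture_response": False})  # raises: capture_response required

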
HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -227,10 +347,15 @@ HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_VARIABLES, default={}): cv.Schema(
|
||||
{cv.string: cv.returning_lambda}
|
||||
),
|
||||
cv.Optional(CONF_RESPONSE_TEMPLATE): cv.templatable(cv.string),
|
||||
cv.Optional(CONF_CAPTURE_RESPONSE, default=False): cv.boolean,
|
||||
cv.Optional(CONF_ON_SUCCESS): automation.validate_automation(single=True),
|
||||
cv.Optional(CONF_ON_ERROR): automation.validate_automation(single=True),
|
||||
}
|
||||
),
|
||||
cv.has_exactly_one_key(CONF_SERVICE, CONF_ACTION),
|
||||
cv.rename_key(CONF_SERVICE, CONF_ACTION),
|
||||
_validate_response_config,
|
||||
)
|
||||
|
||||
|
||||
@@ -244,21 +369,67 @@ HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
|
||||
HomeAssistantServiceCallAction,
|
||||
HOMEASSISTANT_ACTION_ACTION_SCHEMA,
|
||||
)
|
||||
async def homeassistant_service_to_code(config, action_id, template_arg, args):
|
||||
async def homeassistant_service_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_SERVICES")
|
||||
serv = await cg.get_variable(config[CONF_ID])
|
||||
var = cg.new_Pvariable(action_id, template_arg, serv, False)
|
||||
templ = await cg.templatable(config[CONF_ACTION], args, None)
|
||||
cg.add(var.set_service(templ))
|
||||
|
||||
# Initialize FixedVectors with exact sizes from config
|
||||
cg.add(var.init_data(len(config[CONF_DATA])))
|
||||
for key, value in config[CONF_DATA].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data(key, templ))
|
||||
|
||||
cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
|
||||
for key, value in config[CONF_DATA_TEMPLATE].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data_template(key, templ))
|
||||
|
||||
cg.add(var.init_variables(len(config[CONF_VARIABLES])))
|
||||
for key, value in config[CONF_VARIABLES].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_variable(key, templ))
|
||||
|
||||
if on_error := config.get(CONF_ON_ERROR):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES")
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES_ERRORS")
|
||||
cg.add(var.set_wants_status())
|
||||
await automation.build_automation(
|
||||
var.get_error_trigger(),
|
||||
[(cg.std_string, "error"), *args],
|
||||
on_error,
|
||||
)
|
||||
|
||||
if on_success := config.get(CONF_ON_SUCCESS):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES")
|
||||
cg.add(var.set_wants_status())
|
||||
if config[CONF_CAPTURE_RESPONSE]:
|
||||
cg.add(var.set_wants_response())
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON")
|
||||
await automation.build_automation(
|
||||
var.get_success_trigger_with_response(),
|
||||
[(cg.JsonObjectConst, "response"), *args],
|
||||
on_success,
|
||||
)
|
||||
|
||||
if response_template := config.get(CONF_RESPONSE_TEMPLATE):
|
||||
templ = await cg.templatable(response_template, args, cg.std_string)
|
||||
cg.add(var.set_response_template(templ))
|
||||
|
||||
else:
|
||||
await automation.build_automation(
|
||||
var.get_success_trigger(),
|
||||
args,
|
||||
on_success,
|
||||
)
|
||||
|
||||
return var
|
||||
|
||||
|
||||
@@ -294,15 +465,23 @@ async def homeassistant_event_to_code(config, action_id, template_arg, args):
|
||||
var = cg.new_Pvariable(action_id, template_arg, serv, True)
|
||||
templ = await cg.templatable(config[CONF_EVENT], args, None)
|
||||
cg.add(var.set_service(templ))
|
||||
|
||||
# Initialize FixedVectors with exact sizes from config
|
||||
cg.add(var.init_data(len(config[CONF_DATA])))
|
||||
for key, value in config[CONF_DATA].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data(key, templ))
|
||||
|
||||
cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
|
||||
for key, value in config[CONF_DATA_TEMPLATE].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data_template(key, templ))
|
||||
|
||||
cg.add(var.init_variables(len(config[CONF_VARIABLES])))
|
||||
for key, value in config[CONF_VARIABLES].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_variable(key, templ))
|
||||
|
||||
return var
|
||||
|
||||
|
||||
@@ -325,6 +504,8 @@ async def homeassistant_tag_scanned_to_code(config, action_id, template_arg, arg
|
||||
serv = await cg.get_variable(config[CONF_ID])
|
||||
var = cg.new_Pvariable(action_id, template_arg, serv, True)
|
||||
cg.add(var.set_service("esphome.tag_scanned"))
|
||||
# Initialize FixedVector with exact size (1 data field)
|
||||
cg.add(var.init_data(1))
|
||||
templ = await cg.templatable(config[CONF_TAG], args, cg.std_string)
|
||||
cg.add(var.add_data("tag_id", templ))
|
||||
return var
|
||||
|
||||
@@ -7,7 +7,7 @@ service APIConnection {
|
||||
option (needs_setup_connection) = false;
|
||||
option (needs_authentication) = false;
|
||||
}
|
||||
rpc connect (ConnectRequest) returns (ConnectResponse) {
|
||||
rpc authenticate (AuthenticationRequest) returns (AuthenticationResponse) {
|
||||
option (needs_setup_connection) = false;
|
||||
option (needs_authentication) = false;
|
||||
}
|
||||
@@ -66,6 +66,9 @@ service APIConnection {
|
||||
rpc voice_assistant_set_configuration(VoiceAssistantSetConfiguration) returns (void) {}
|
||||
|
||||
rpc alarm_control_panel_command (AlarmControlPanelCommandRequest) returns (void) {}
|
||||
|
||||
rpc zwave_proxy_frame(ZWaveProxyFrame) returns (void) {}
|
||||
rpc zwave_proxy_request(ZWaveProxyRequest) returns (void) {}
|
||||
}
|
||||
|
||||
|
||||
@@ -99,7 +102,7 @@ message HelloRequest {
|
||||
// For example "Home Assistant"
|
||||
// Not strictly necessary to send but nice for debugging
|
||||
// purposes.
|
||||
string client_info = 1;
|
||||
string client_info = 1 [(pointer_to_buffer) = true];
|
||||
uint32 api_version_major = 2;
|
||||
uint32 api_version_minor = 3;
|
||||
}
|
||||
@@ -129,21 +132,23 @@ message HelloResponse {
|
||||
|
||||
// Message sent at the beginning of each connection to authenticate the client
|
||||
// Can only be sent by the client and only at the beginning of the connection
|
||||
message ConnectRequest {
|
||||
message AuthenticationRequest {
|
||||
option (id) = 3;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_PASSWORD";
|
||||
|
||||
// The password to log in with
|
||||
string password = 1;
|
||||
string password = 1 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
// Confirmation of successful connection. After this the connection is available for all traffic.
|
||||
// Can only be sent by the server and only at the beginning of the connection
|
||||
message ConnectResponse {
|
||||
message AuthenticationResponse {
|
||||
option (id) = 4;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_PASSWORD";
|
||||
|
||||
bool invalid_password = 1;
|
||||
}
|
||||
@@ -252,6 +257,10 @@ message DeviceInfoResponse {
|
||||
|
||||
// Top-level area info to phase out suggested_area
|
||||
AreaInfo area = 22 [(field_ifdef) = "USE_AREAS"];
|
||||
|
||||
// Indicates if Z-Wave proxy support is available and features supported
|
||||
uint32 zwave_proxy_feature_flags = 23 [(field_ifdef) = "USE_ZWAVE_PROXY"];
|
||||
uint32 zwave_home_id = 24 [(field_ifdef) = "USE_ZWAVE_PROXY"];
|
||||
}
|
||||
|
||||
message ListEntitiesRequest {
|
||||
@@ -497,7 +506,7 @@ message ListEntitiesLightResponse {
|
||||
string name = 3;
|
||||
reserved 4; // Deprecated: was string unique_id
|
||||
|
||||
repeated ColorMode supported_color_modes = 12 [(container_pointer) = "std::set<light::ColorMode>"];
|
||||
repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"];
|
||||
// next four supports_* are for legacy clients, newer clients should use color modes
|
||||
// Deprecated in API version 1.6
|
||||
bool legacy_supports_brightness = 5 [deprecated=true];
|
||||
@@ -760,17 +769,33 @@ message HomeassistantServiceMap {
|
||||
string value = 2 [(no_zero_copy) = true];
|
||||
}
|
||||
|
||||
message HomeassistantServiceResponse {
|
||||
message HomeassistantActionRequest {
|
||||
option (id) = 35;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_HOMEASSISTANT_SERVICES";
|
||||
|
||||
string service = 1;
|
||||
repeated HomeassistantServiceMap data = 2;
|
||||
repeated HomeassistantServiceMap data_template = 3;
|
||||
repeated HomeassistantServiceMap variables = 4;
|
||||
repeated HomeassistantServiceMap data = 2 [(fixed_vector) = true];
|
||||
repeated HomeassistantServiceMap data_template = 3 [(fixed_vector) = true];
|
||||
repeated HomeassistantServiceMap variables = 4 [(fixed_vector) = true];
|
||||
bool is_event = 5;
|
||||
uint32 call_id = 6 [(field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES"];
|
||||
bool wants_response = 7 [(field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
|
||||
string response_template = 8 [(no_zero_copy) = true, (field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
|
||||
}
|
||||
|
||||
// Message sent by Home Assistant to ESPHome with service call response data
|
||||
message HomeassistantActionResponse {
|
||||
option (id) = 130;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES";
|
||||
|
||||
uint32 call_id = 1; // Matches the call_id from HomeassistantActionRequest
|
||||
bool success = 2; // Whether the service call succeeded
|
||||
string error_message = 3; // Error message if success = false
|
||||
bytes response_data = 4 [(pointer_to_buffer) = true, (field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
|
||||
}
|
||||
|
||||
// ==================== IMPORT HOME ASSISTANT STATES ====================
|
||||
@@ -815,7 +840,7 @@ message GetTimeResponse {
|
||||
option (no_delay) = true;
|
||||
|
||||
fixed32 epoch_seconds = 1;
|
||||
string timezone = 2;
|
||||
string timezone = 2 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
// ==================== USER-DEFINED SERVICES ====================
|
||||
@@ -841,7 +866,7 @@ message ListEntitiesServicesResponse {
|
||||
|
||||
string name = 1;
|
||||
fixed32 key = 2;
|
||||
repeated ListEntitiesServicesArgument args = 3;
|
||||
repeated ListEntitiesServicesArgument args = 3 [(fixed_vector) = true];
|
||||
}
|
||||
message ExecuteServiceArgument {
|
||||
option (ifdef) = "USE_API_SERVICES";
|
||||
@@ -851,10 +876,10 @@ message ExecuteServiceArgument {
|
||||
string string_ = 4;
|
||||
// ESPHome 1.14 (api v1.3) make int a signed value
|
||||
sint32 int_ = 5;
|
||||
repeated bool bool_array = 6 [packed=false];
|
||||
repeated sint32 int_array = 7 [packed=false];
|
||||
repeated float float_array = 8 [packed=false];
|
||||
repeated string string_array = 9;
|
||||
repeated bool bool_array = 6 [packed=false, (fixed_vector) = true];
|
||||
repeated sint32 int_array = 7 [packed=false, (fixed_vector) = true];
|
||||
repeated float float_array = 8 [packed=false, (fixed_vector) = true];
|
||||
repeated string string_array = 9 [(fixed_vector) = true];
|
||||
}
|
||||
message ExecuteServiceRequest {
|
||||
option (id) = 42;
|
||||
@@ -863,7 +888,7 @@ message ExecuteServiceRequest {
|
||||
option (ifdef) = "USE_API_SERVICES";
|
||||
|
||||
fixed32 key = 1;
|
||||
repeated ExecuteServiceArgument args = 2;
|
||||
repeated ExecuteServiceArgument args = 2 [(fixed_vector) = true];
|
||||
}
|
||||
|
||||
// ==================== CAMERA ====================
|
||||
@@ -962,8 +987,8 @@ message ListEntitiesClimateResponse {
|
||||
string name = 3;
|
||||
reserved 4; // Deprecated: was string unique_id
|
||||
|
||||
bool supports_current_temperature = 5;
|
||||
bool supports_two_point_target_temperature = 6;
|
||||
bool supports_current_temperature = 5; // Deprecated: use feature_flags
|
||||
bool supports_two_point_target_temperature = 6; // Deprecated: use feature_flags
|
||||
repeated ClimateMode supported_modes = 7 [(container_pointer) = "std::set<climate::ClimateMode>"];
|
||||
float visual_min_temperature = 8;
|
||||
float visual_max_temperature = 9;
|
||||
@@ -972,7 +997,7 @@ message ListEntitiesClimateResponse {
|
||||
// is if CLIMATE_PRESET_AWAY exists in supported_presets
|
||||
// Deprecated in API version 1.5
|
||||
bool legacy_supports_away = 11 [deprecated=true];
|
||||
bool supports_action = 12;
|
||||
bool supports_action = 12; // Deprecated: use feature_flags
|
||||
repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer) = "std::set<climate::ClimateFanMode>"];
|
||||
repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer) = "std::set<climate::ClimateSwingMode>"];
|
||||
repeated string supported_custom_fan_modes = 15 [(container_pointer) = "std::set"];
|
||||
@@ -982,11 +1007,12 @@ message ListEntitiesClimateResponse {
|
||||
string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"];
|
||||
EntityCategory entity_category = 20;
|
||||
float visual_current_temperature_step = 21;
|
||||
bool supports_current_humidity = 22;
|
||||
bool supports_target_humidity = 23;
|
||||
bool supports_current_humidity = 22; // Deprecated: use feature_flags
|
||||
bool supports_target_humidity = 23; // Deprecated: use feature_flags
|
||||
float visual_min_humidity = 24;
|
||||
float visual_max_humidity = 25;
|
||||
uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
|
||||
uint32 feature_flags = 27;
|
||||
}
|
||||
message ClimateStateResponse {
|
||||
option (id) = 47;
|
||||
@@ -1456,7 +1482,7 @@ message BluetoothDeviceRequest {
|
||||
|
||||
uint64 address = 1;
|
||||
BluetoothDeviceRequestType request_type = 2;
|
||||
bool has_address_type = 3;
|
||||
bool has_address_type = 3; // Deprecated, should be removed in 2027.8 - https://github.com/esphome/esphome/pull/10318
|
||||
uint32 address_type = 4;
|
||||
}
|
||||
|
||||
@@ -1494,7 +1520,7 @@ message BluetoothGATTCharacteristic {
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
|
||||
uint32 handle = 2;
|
||||
uint32 properties = 3;
|
||||
repeated BluetoothGATTDescriptor descriptors = 4;
|
||||
repeated BluetoothGATTDescriptor descriptors = 4 [(fixed_vector) = true];
|
||||
|
||||
// New field for efficient UUID (v1.12+)
|
||||
// Only one of uuid or short_uuid will be set.
|
||||
@@ -1506,7 +1532,7 @@ message BluetoothGATTCharacteristic {
|
||||
message BluetoothGATTService {
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
|
||||
uint32 handle = 2;
|
||||
repeated BluetoothGATTCharacteristic characteristics = 3;
|
||||
repeated BluetoothGATTCharacteristic characteristics = 3 [(fixed_vector) = true];
|
||||
|
||||
// New field for efficient UUID (v1.12+)
|
||||
// Only one of uuid or short_uuid will be set.
|
||||
@@ -1562,7 +1588,7 @@ message BluetoothGATTWriteRequest {
|
||||
uint32 handle = 2;
|
||||
bool response = 3;
|
||||
|
||||
bytes data = 4;
|
||||
bytes data = 4 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
message BluetoothGATTReadDescriptorRequest {
|
||||
@@ -1582,7 +1608,7 @@ message BluetoothGATTWriteDescriptorRequest {
|
||||
uint64 address = 1;
|
||||
uint32 handle = 2;
|
||||
|
||||
bytes data = 3;
|
||||
bytes data = 3 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
message BluetoothGATTNotifyRequest {
|
||||
@@ -1856,10 +1882,22 @@ message VoiceAssistantWakeWord {
|
||||
repeated string trained_languages = 3;
|
||||
}
|
||||
|
||||
message VoiceAssistantExternalWakeWord {
|
||||
string id = 1;
|
||||
string wake_word = 2;
|
||||
repeated string trained_languages = 3;
|
||||
string model_type = 4;
|
||||
uint32 model_size = 5;
|
||||
string model_hash = 6;
|
||||
string url = 7;
|
||||
}
|
||||
|
||||
message VoiceAssistantConfigurationRequest {
|
||||
option (id) = 121;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (ifdef) = "USE_VOICE_ASSISTANT";
|
||||
|
||||
repeated VoiceAssistantExternalWakeWord external_wake_words = 1;
|
||||
}
|
||||
|
||||
message VoiceAssistantConfigurationResponse {
|
||||
@@ -2274,3 +2312,28 @@ message UpdateCommandRequest {
|
||||
UpdateCommand command = 2;
|
||||
uint32 device_id = 3 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
|
||||
// ==================== Z-WAVE ====================
|
||||
|
||||
message ZWaveProxyFrame {
|
||||
option (id) = 128;
|
||||
option (source) = SOURCE_BOTH;
|
||||
option (ifdef) = "USE_ZWAVE_PROXY";
|
||||
option (no_delay) = true;
|
||||
|
||||
bytes data = 1 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
enum ZWaveProxyRequestType {
|
||||
ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE = 0;
|
||||
ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE = 1;
|
||||
ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE = 2;
|
||||
}
|
||||
message ZWaveProxyRequest {
|
||||
option (id) = 129;
|
||||
option (source) = SOURCE_BOTH;
|
||||
option (ifdef) = "USE_ZWAVE_PROXY";
|
||||
|
||||
ZWaveProxyRequestType type = 1;
|
||||
bytes data = 2 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
@@ -8,9 +8,9 @@
|
||||
#endif
|
||||
#include <cerrno>
|
||||
#include <cinttypes>
|
||||
#include <utility>
|
||||
#include <functional>
|
||||
#include <limits>
|
||||
#include <utility>
|
||||
#include "esphome/components/network/util.h"
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/entity_base.h"
|
||||
@@ -27,9 +27,15 @@
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
#include "esphome/components/bluetooth_proxy/bluetooth_proxy.h"
|
||||
#endif
|
||||
#ifdef USE_CLIMATE
|
||||
#include "esphome/components/climate/climate_mode.h"
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
#include "esphome/components/voice_assistant/voice_assistant.h"
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
#include "esphome/components/zwave_proxy/zwave_proxy.h"
|
||||
#endif
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
@@ -113,8 +119,7 @@ void APIConnection::start() {
|
||||
|
||||
APIError err = this->helper_->init();
|
||||
if (err != APIError::OK) {
|
||||
on_fatal_error();
|
||||
this->log_warning_(LOG_STR("Helper init failed"), err);
|
||||
this->fatal_error_with_log_(LOG_STR("Helper init failed"), err);
|
||||
return;
|
||||
}
|
||||
this->client_info_.peername = helper_->getpeername();
|
||||
@@ -144,8 +149,7 @@ void APIConnection::loop() {
|
||||
|
||||
APIError err = this->helper_->loop();
|
||||
if (err != APIError::OK) {
|
||||
on_fatal_error();
|
||||
this->log_socket_operation_failed_(err);
|
||||
this->fatal_error_with_log_(LOG_STR("Socket operation failed"), err);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -160,17 +164,13 @@ void APIConnection::loop() {
|
||||
// No more data available
|
||||
break;
|
||||
} else if (err != APIError::OK) {
|
||||
on_fatal_error();
|
||||
this->log_warning_(LOG_STR("Reading failed"), err);
|
||||
this->fatal_error_with_log_(LOG_STR("Reading failed"), err);
|
||||
return;
|
||||
} else {
|
||||
this->last_traffic_ = now;
|
||||
// read a packet
|
||||
if (buffer.data_len > 0) {
|
||||
this->read_message(buffer.data_len, buffer.type, &buffer.container[buffer.data_offset]);
|
||||
} else {
|
||||
this->read_message(0, buffer.type, nullptr);
|
||||
}
|
||||
this->read_message(buffer.data_len, buffer.type,
|
||||
buffer.data_len > 0 ? &buffer.container[buffer.data_offset] : nullptr);
|
||||
if (this->flags_.remove)
|
||||
return;
|
||||
}
|
||||
@@ -202,7 +202,8 @@ void APIConnection::loop() {
|
||||
// Disconnect if not responded within 2.5*keepalive
|
||||
if (now - this->last_traffic_ > KEEPALIVE_DISCONNECT_TIMEOUT) {
|
||||
on_fatal_error();
|
||||
ESP_LOGW(TAG, "%s is unresponsive; disconnecting", this->get_client_combined_info().c_str());
|
||||
ESP_LOGW(TAG, "%s (%s) is unresponsive; disconnecting", this->client_info_.name.c_str(),
|
||||
this->client_info_.peername.c_str());
|
||||
}
|
||||
} else if (now - this->last_traffic_ > KEEPALIVE_TIMEOUT_MS && !this->flags_.remove) {
|
||||
// Only send ping if we're not disconnecting
|
||||
@@ -252,7 +253,7 @@ bool APIConnection::send_disconnect_response(const DisconnectRequest &msg) {
|
||||
// remote initiated disconnect_client
|
||||
// don't close yet, we still need to send the disconnect response
|
||||
// close will happen on next loop
|
||||
ESP_LOGD(TAG, "%s disconnected", this->get_client_combined_info().c_str());
|
||||
ESP_LOGD(TAG, "%s (%s) disconnected", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
|
||||
this->flags_.next_close = true;
|
||||
DisconnectResponse resp;
|
||||
return this->send_message(resp, DisconnectResponse::MESSAGE_TYPE);
|
||||
@@ -452,7 +453,6 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *
|
||||
bool is_single) {
|
||||
auto *light = static_cast<light::LightState *>(entity);
|
||||
LightStateResponse resp;
|
||||
auto traits = light->get_traits();
|
||||
auto values = light->remote_values;
|
||||
auto color_mode = values.get_color_mode();
|
||||
resp.state = values.is_on();
|
||||
@@ -476,7 +476,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c
|
||||
auto *light = static_cast<light::LightState *>(entity);
|
||||
ListEntitiesLightResponse msg;
|
||||
auto traits = light->get_traits();
|
||||
msg.supported_color_modes = &traits.get_supported_color_modes_for_api_();
|
||||
// Pass pointer to ColorModeMask so the iterator can encode actual ColorMode enum values
|
||||
msg.supported_color_modes = &traits.get_supported_color_modes();
|
||||
if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) ||
|
||||
traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) {
|
||||
msg.min_mireds = traits.get_min_mireds();
|
||||
@@ -625,9 +626,10 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
|
||||
auto traits = climate->get_traits();
|
||||
resp.mode = static_cast<enums::ClimateMode>(climate->mode);
|
||||
resp.action = static_cast<enums::ClimateAction>(climate->action);
|
||||
if (traits.get_supports_current_temperature())
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE))
|
||||
resp.current_temperature = climate->current_temperature;
|
||||
if (traits.get_supports_two_point_target_temperature()) {
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
|
||||
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
|
||||
resp.target_temperature_low = climate->target_temperature_low;
|
||||
resp.target_temperature_high = climate->target_temperature_high;
|
||||
} else {
|
||||
@@ -646,9 +648,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
|
||||
}
|
||||
if (traits.get_supports_swing_modes())
|
||||
resp.swing_mode = static_cast<enums::ClimateSwingMode>(climate->swing_mode);
|
||||
if (traits.get_supports_current_humidity())
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY))
|
||||
resp.current_humidity = climate->current_humidity;
|
||||
if (traits.get_supports_target_humidity())
|
||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
|
||||
resp.target_humidity = climate->target_humidity;
|
||||
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
|
||||
is_single);
|
||||
@@ -658,10 +660,15 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
|
||||
auto *climate = static_cast<climate::Climate *>(entity);
|
||||
ListEntitiesClimateResponse msg;
|
||||
auto traits = climate->get_traits();
|
||||
msg.supports_current_temperature = traits.get_supports_current_temperature();
|
||||
msg.supports_current_humidity = traits.get_supports_current_humidity();
|
||||
msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
|
||||
msg.supports_target_humidity = traits.get_supports_target_humidity();
|
||||
// Flags set for backward compatibility, deprecated in 2025.11.0
|
||||
msg.supports_current_temperature = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
msg.supports_current_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
|
||||
msg.supports_two_point_target_temperature = traits.has_feature_flags(
|
||||
climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
|
||||
msg.supports_target_humidity = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);
|
||||
msg.supports_action = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION);
|
||||
// Current feature flags and other supported parameters
|
||||
msg.feature_flags = traits.get_feature_flags();
|
||||
msg.supported_modes = &traits.get_supported_modes_for_api_();
|
||||
msg.visual_min_temperature = traits.get_visual_min_temperature();
|
||||
msg.visual_max_temperature = traits.get_visual_max_temperature();
|
||||
@@ -669,7 +676,6 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
|
||||
msg.visual_current_temperature_step = traits.get_visual_current_temperature_step();
|
||||
msg.visual_min_humidity = traits.get_visual_min_humidity();
|
||||
msg.visual_max_humidity = traits.get_visual_max_humidity();
|
||||
msg.supports_action = traits.get_supports_action();
|
||||
msg.supported_fan_modes = &traits.get_supported_fan_modes_for_api_();
|
||||
msg.supported_custom_fan_modes = &traits.get_supported_custom_fan_modes_for_api_();
|
||||
msg.supported_presets = &traits.get_supported_presets_for_api_();
|
||||
@@ -1075,8 +1081,9 @@ void APIConnection::on_get_time_response(const GetTimeResponse &value) {
|
||||
if (homeassistant::global_homeassistant_time != nullptr) {
|
||||
homeassistant::global_homeassistant_time->set_epoch_time(value.epoch_seconds);
|
||||
#ifdef USE_TIME_TIMEZONE
|
||||
if (!value.timezone.empty() && value.timezone != homeassistant::global_homeassistant_time->get_timezone()) {
|
||||
homeassistant::global_homeassistant_time->set_timezone(value.timezone);
|
||||
if (value.timezone_len > 0) {
|
||||
homeassistant::global_homeassistant_time->set_timezone(reinterpret_cast<const char *>(value.timezone),
|
||||
value.timezone_len);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
@@ -1193,6 +1200,23 @@ bool APIConnection::send_voice_assistant_get_configuration_response(const VoiceA
|
||||
resp_wake_word.trained_languages.push_back(lang);
|
||||
}
|
||||
}
|
||||
|
||||
// Filter external wake words
|
||||
for (auto &wake_word : msg.external_wake_words) {
|
||||
if (wake_word.model_type != "micro") {
|
||||
// microWakeWord only
|
||||
continue;
|
||||
}
|
||||
|
||||
resp.available_wake_words.emplace_back();
|
||||
auto &resp_wake_word = resp.available_wake_words.back();
|
||||
resp_wake_word.set_id(StringRef(wake_word.id));
|
||||
resp_wake_word.set_wake_word(StringRef(wake_word.wake_word));
|
||||
for (const auto &lang : wake_word.trained_languages) {
|
||||
resp_wake_word.trained_languages.push_back(lang);
|
||||
}
|
||||
}
|
||||
|
||||
resp.active_wake_words = &config.active_wake_words;
|
||||
resp.max_active_wake_words = config.max_active_wake_words;
|
||||
return this->send_message(resp, VoiceAssistantConfigurationResponse::MESSAGE_TYPE);
|
||||
@@ -1203,7 +1227,16 @@ void APIConnection::voice_assistant_set_configuration(const VoiceAssistantSetCon
|
||||
voice_assistant::global_voice_assistant->on_set_configuration(msg.active_wake_words);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void APIConnection::zwave_proxy_frame(const ZWaveProxyFrame &msg) {
|
||||
zwave_proxy::global_zwave_proxy->send_frame(msg.data, msg.data_len);
|
||||
}
|
||||
|
||||
void APIConnection::zwave_proxy_request(const ZWaveProxyRequest &msg) {
|
||||
zwave_proxy::global_zwave_proxy->zwave_proxy_request(this, msg.type);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
@@ -1350,7 +1383,7 @@ void APIConnection::complete_authentication_() {
|
||||
}
|
||||
|
||||
this->flags_.connection_state = static_cast<uint8_t>(ConnectionState::AUTHENTICATED);
|
||||
ESP_LOGD(TAG, "%s connected", this->get_client_combined_info().c_str());
|
||||
ESP_LOGD(TAG, "%s (%s) connected", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
|
||||
#ifdef USE_API_CLIENT_CONNECTED_TRIGGER
|
||||
this->parent_->get_client_connected_trigger()->trigger(this->client_info_.name, this->client_info_.peername);
|
||||
#endif
|
||||
@@ -1359,10 +1392,15 @@ void APIConnection::complete_authentication_() {
|
||||
this->send_time_request();
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
if (zwave_proxy::global_zwave_proxy != nullptr) {
|
||||
zwave_proxy::global_zwave_proxy->api_connection_authenticated(this);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
bool APIConnection::send_hello_response(const HelloRequest &msg) {
|
||||
this->client_info_.name = msg.client_info;
|
||||
this->client_info_.name.assign(reinterpret_cast<const char *>(msg.client_info), msg.client_info_len);
|
||||
this->client_info_.peername = this->helper_->getpeername();
|
||||
this->client_api_version_major_ = msg.api_version_major;
|
||||
this->client_api_version_minor_ = msg.api_version_minor;
|
||||
@@ -1371,7 +1409,7 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {
|
||||
|
||||
HelloResponse resp;
|
||||
resp.api_version_major = 1;
|
||||
resp.api_version_minor = 12;
|
||||
resp.api_version_minor = 13;
|
||||
// Send only the version string - the client only logs this for debugging and doesn't use it otherwise
|
||||
resp.set_server_info(ESPHOME_VERSION_REF);
|
||||
resp.set_name(StringRef(App.get_name()));
|
||||
@@ -1386,20 +1424,17 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {
|
||||
|
||||
return this->send_message(resp, HelloResponse::MESSAGE_TYPE);
|
||||
}
|
||||
bool APIConnection::send_connect_response(const ConnectRequest &msg) {
|
||||
bool correct = true;
|
||||
#ifdef USE_API_PASSWORD
|
||||
correct = this->parent_->check_password(msg.password);
|
||||
#endif
|
||||
|
||||
ConnectResponse resp;
|
||||
bool APIConnection::send_authenticate_response(const AuthenticationRequest &msg) {
|
||||
AuthenticationResponse resp;
|
||||
// bool invalid_password = 1;
|
||||
resp.invalid_password = !correct;
|
||||
if (correct) {
|
||||
resp.invalid_password = !this->parent_->check_password(msg.password, msg.password_len);
|
||||
if (!resp.invalid_password) {
|
||||
this->complete_authentication_();
|
||||
}
|
||||
return this->send_message(resp, ConnectResponse::MESSAGE_TYPE);
|
||||
return this->send_message(resp, AuthenticationResponse::MESSAGE_TYPE);
|
||||
}
|
||||
#endif // USE_API_PASSWORD
|
||||
|
||||
bool APIConnection::send_ping_response(const PingRequest &msg) {
|
||||
PingResponse resp;
|
||||
@@ -1463,6 +1498,10 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
resp.voice_assistant_feature_flags = voice_assistant::global_voice_assistant->get_feature_flags();
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
resp.zwave_proxy_feature_flags = zwave_proxy::global_zwave_proxy->get_feature_flags();
|
||||
resp.zwave_home_id = zwave_proxy::global_zwave_proxy->get_home_id();
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
resp.api_encryption_supported = true;
|
||||
#endif
|
||||
@@ -1513,6 +1552,20 @@ void APIConnection::execute_service(const ExecuteServiceRequest &msg) {
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
void APIConnection::on_homeassistant_action_response(const HomeassistantActionResponse &msg) {
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
if (msg.response_data_len > 0) {
|
||||
this->parent_->handle_action_response(msg.call_id, msg.success, msg.error_message, msg.response_data,
|
||||
msg.response_data_len);
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
this->parent_->handle_action_response(msg.call_id, msg.success, msg.error_message);
|
||||
}
|
||||
};
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryptionSetKeyRequest &msg) {
|
||||
NoiseEncryptionSetKeyResponse resp;
|
||||
@@ -1543,8 +1596,7 @@ bool APIConnection::try_to_clear_buffer(bool log_out_of_space) {
|
||||
delay(0);
|
||||
APIError err = this->helper_->loop();
|
||||
if (err != APIError::OK) {
|
||||
on_fatal_error();
|
||||
this->log_socket_operation_failed_(err);
|
||||
this->fatal_error_with_log_(LOG_STR("Socket operation failed"), err);
|
||||
return false;
|
||||
}
|
||||
if (this->helper_->can_write_without_blocking())
|
||||
@@ -1563,8 +1615,7 @@ bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
|
||||
if (err == APIError::WOULD_BLOCK)
|
||||
return false;
|
||||
if (err != APIError::OK) {
|
||||
on_fatal_error();
|
||||
this->log_warning_(LOG_STR("Packet write failed"), err);
|
||||
this->fatal_error_with_log_(LOG_STR("Packet write failed"), err);
|
||||
return false;
|
||||
}
|
||||
// Do not set last_traffic_ on send
|
||||
@@ -1573,12 +1624,12 @@ bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
|
||||
#ifdef USE_API_PASSWORD
|
||||
void APIConnection::on_unauthenticated_access() {
|
||||
this->on_fatal_error();
|
||||
ESP_LOGD(TAG, "%s access without authentication", this->get_client_combined_info().c_str());
|
||||
ESP_LOGD(TAG, "%s (%s) no authentication", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
|
||||
}
|
||||
#endif
|
||||
void APIConnection::on_no_setup_connection() {
|
||||
this->on_fatal_error();
|
||||
ESP_LOGD(TAG, "%s access without full connection", this->get_client_combined_info().c_str());
|
||||
ESP_LOGD(TAG, "%s (%s) no connection setup", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
|
||||
}
|
||||
void APIConnection::on_fatal_error() {
|
||||
this->helper_->close();
|
||||
@@ -1750,8 +1801,7 @@ void APIConnection::process_batch_() {
|
||||
APIError err = this->helper_->write_protobuf_packets(ProtoWriteBuffer{&shared_buf},
|
||||
std::span<const PacketInfo>(packet_info, packet_count));
|
||||
if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
|
||||
on_fatal_error();
|
||||
this->log_warning_(LOG_STR("Batch write failed"), err);
|
||||
this->fatal_error_with_log_(LOG_STR("Batch write failed"), err);
|
||||
}
|
||||
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
@@ -1830,12 +1880,8 @@ void APIConnection::process_state_subscriptions_() {
|
||||
#endif // USE_API_HOMEASSISTANT_STATES
|
||||
|
||||
void APIConnection::log_warning_(const LogString *message, APIError err) {
|
||||
ESP_LOGW(TAG, "%s: %s %s errno=%d", this->get_client_combined_info().c_str(), LOG_STR_ARG(message),
|
||||
LOG_STR_ARG(api_error_to_logstr(err)), errno);
|
||||
}
|
||||
|
||||
void APIConnection::log_socket_operation_failed_(APIError err) {
|
||||
this->log_warning_(LOG_STR("Socket operation failed"), err);
|
||||
ESP_LOGW(TAG, "%s (%s): %s %s errno=%d", this->client_info_.name.c_str(), this->client_info_.peername.c_str(),
|
||||
LOG_STR_ARG(message), LOG_STR_ARG(api_error_to_logstr(err)), errno);
|
||||
}
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/entity_base.h"
|
||||
|
||||
#include <vector>
|
||||
#include <functional>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
@@ -19,14 +19,6 @@ namespace esphome::api {
|
||||
struct ClientInfo {
|
||||
std::string name; // Client name from Hello message
|
||||
std::string peername; // IP:port from socket
|
||||
|
||||
std::string get_combined_info() const {
|
||||
if (name == peername) {
|
||||
// Before Hello message, both are the same
|
||||
return name;
|
||||
}
|
||||
return name + " (" + peername + ")";
|
||||
}
|
||||
};
|
||||
|
||||
// Keepalive timeout in milliseconds
|
||||
@@ -132,12 +124,15 @@ class APIConnection final : public APIServerConnection {
|
||||
#endif
|
||||
bool try_send_log_message(int level, const char *tag, const char *line, size_t message_len);
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void send_homeassistant_service_call(const HomeassistantServiceResponse &call) {
|
||||
void send_homeassistant_action(const HomeassistantActionRequest &call) {
|
||||
if (!this->flags_.service_call_subscription)
|
||||
return;
|
||||
this->send_message(call, HomeassistantServiceResponse::MESSAGE_TYPE);
|
||||
this->send_message(call, HomeassistantActionRequest::MESSAGE_TYPE);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
void on_homeassistant_action_response(const HomeassistantActionResponse &msg) override;
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
#endif // USE_API_HOMEASSISTANT_SERVICES
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) override;
|
||||
void unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) override;
|
||||
@@ -171,6 +166,11 @@ class APIConnection final : public APIServerConnection {
|
||||
void voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) override;
|
||||
#endif
|
||||
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void zwave_proxy_frame(const ZWaveProxyFrame &msg) override;
|
||||
void zwave_proxy_request(const ZWaveProxyRequest &msg) override;
|
||||
#endif
|
||||
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
bool send_alarm_control_panel_state(alarm_control_panel::AlarmControlPanel *a_alarm_control_panel);
|
||||
void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) override;
|
||||
@@ -197,7 +197,9 @@ class APIConnection final : public APIServerConnection {
|
||||
void on_get_time_response(const GetTimeResponse &value) override;
|
||||
#endif
|
||||
bool send_hello_response(const HelloRequest &msg) override;
|
||||
bool send_connect_response(const ConnectRequest &msg) override;
|
||||
#ifdef USE_API_PASSWORD
|
||||
bool send_authenticate_response(const AuthenticationRequest &msg) override;
|
||||
#endif
|
||||
bool send_disconnect_response(const DisconnectRequest &msg) override;
|
||||
bool send_ping_response(const PingRequest &msg) override;
|
||||
bool send_device_info_response(const DeviceInfoRequest &msg) override;
|
||||
@@ -271,7 +273,8 @@ class APIConnection final : public APIServerConnection {
|
||||
bool try_to_clear_buffer(bool log_out_of_space);
|
||||
bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) override;
|
||||
|
||||
std::string get_client_combined_info() const { return this->client_info_.get_combined_info(); }
|
||||
const std::string &get_name() const { return this->client_info_.name; }
|
||||
const std::string &get_peername() const { return this->client_info_.peername; }
|
||||
|
||||
protected:
|
||||
// Helper function to handle authentication completion
|
||||
@@ -732,8 +735,11 @@ class APIConnection final : public APIServerConnection {

// Helper function to log API errors with errno
void log_warning_(const LogString *message, APIError err);
// Specific helper for duplicated error message
void log_socket_operation_failed_(APIError err);
// Helper to handle fatal errors with logging
inline void fatal_error_with_log_(const LogString *message, APIError err) {
this->on_fatal_error();
this->log_warning_(message, err);
}
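The helper above folds the repeated `on_fatal_error()` + `log_warning_()` pair at each call site into a single call, as the earlier hunks in api_connection.cpp show. A minimal standalone sketch of that consolidation (the `FakeError`/`Connection` types here are illustrative stand-ins, not ESPHome APIs):

```cpp
#include <cstdio>

enum class FakeError { OK, SOCKET_WRITE_FAILED };

class Connection {
 public:
  void send(FakeError err) {
    if (err != FakeError::OK) {
      // Before: every call site repeated the two-step pattern
      //   this->on_fatal_error();
      //   this->log_warning_("Packet write failed", err);
      // After: one inlined helper keeps the order (close first, then log) consistent
      this->fatal_error_with_log_("Packet write failed", err);
    }
  }

 private:
  void on_fatal_error() { closed_ = true; }
  void log_warning_(const char *msg, FakeError err) {
    std::printf("WARNING: %s (err=%d)\n", msg, static_cast<int>(err));
  }
  inline void fatal_error_with_log_(const char *msg, FakeError err) {
    this->on_fatal_error();
    this->log_warning_(msg, err);
  }
  bool closed_{false};
};

int main() {
  Connection c;
  c.send(FakeError::SOCKET_WRITE_FAILED);
  return 0;
}
```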
};
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -13,7 +13,8 @@ namespace esphome::api {
|
||||
|
||||
static const char *const TAG = "api.frame_helper";
|
||||
|
||||
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
|
||||
#define HELPER_LOG(msg, ...) \
|
||||
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), this->client_info_->peername.c_str(), ##__VA_ARGS__)
|
||||
|
||||
#ifdef HELPER_LOG_PACKETS
|
||||
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
|
||||
@@ -80,7 +81,7 @@ const LogString *api_error_to_logstr(APIError err) {
|
||||
|
||||
// Default implementation for loop - handles sending buffered data
|
||||
APIError APIFrameHelper::loop() {
|
||||
if (!this->tx_buf_.empty()) {
|
||||
if (this->tx_buf_count_ > 0) {
|
||||
APIError err = try_send_tx_buf_();
|
||||
if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
|
||||
return err;
|
||||
@@ -102,9 +103,20 @@ APIError APIFrameHelper::handle_socket_write_error_() {
|
||||
// Helper method to buffer data from IOVs
|
||||
void APIFrameHelper::buffer_data_from_iov_(const struct iovec *iov, int iovcnt, uint16_t total_write_len,
|
||||
uint16_t offset) {
|
||||
SendBuffer buffer;
|
||||
buffer.size = total_write_len - offset;
|
||||
buffer.data = std::make_unique<uint8_t[]>(buffer.size);
|
||||
// Check if queue is full
|
||||
if (this->tx_buf_count_ >= API_MAX_SEND_QUEUE) {
|
||||
HELPER_LOG("Send queue full (%u buffers), dropping connection", this->tx_buf_count_);
|
||||
this->state_ = State::FAILED;
|
||||
return;
|
||||
}
|
||||
|
||||
uint16_t buffer_size = total_write_len - offset;
|
||||
auto &buffer = this->tx_buf_[this->tx_buf_tail_];
|
||||
buffer = std::make_unique<SendBuffer>(SendBuffer{
|
||||
.data = std::make_unique<uint8_t[]>(buffer_size),
|
||||
.size = buffer_size,
|
||||
.offset = 0,
|
||||
});
|
||||
|
||||
uint16_t to_skip = offset;
|
||||
uint16_t write_pos = 0;
|
||||
@@ -117,12 +129,15 @@ void APIFrameHelper::buffer_data_from_iov_(const struct iovec *iov, int iovcnt,
|
||||
// Include this segment (partially or fully)
|
||||
const uint8_t *src = reinterpret_cast<uint8_t *>(iov[i].iov_base) + to_skip;
|
||||
uint16_t len = static_cast<uint16_t>(iov[i].iov_len) - to_skip;
|
||||
std::memcpy(buffer.data.get() + write_pos, src, len);
|
||||
std::memcpy(buffer->data.get() + write_pos, src, len);
|
||||
write_pos += len;
|
||||
to_skip = 0;
|
||||
}
|
||||
}
this->tx_buf_.push_back(std::move(buffer));

// Update circular buffer tracking
this->tx_buf_tail_ = (this->tx_buf_tail_ + 1) % API_MAX_SEND_QUEUE;
this->tx_buf_count_++;
}
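The hunk above replaces the `std::deque<SendBuffer>` send queue with a fixed array driven by `tx_buf_head_`, `tx_buf_tail_` and `tx_buf_count_`. A minimal sketch of that circular-queue bookkeeping, assuming an illustrative capacity and plain byte vectors in place of `SendBuffer`:

```cpp
#include <array>
#include <cstdint>
#include <memory>
#include <vector>

static constexpr uint8_t API_MAX_SEND_QUEUE = 8;  // illustrative capacity, not the real define

class SendQueue {
 public:
  bool push(std::vector<uint8_t> data) {
    if (count_ >= API_MAX_SEND_QUEUE)
      return false;  // queue full -> caller drops the connection
    buf_[tail_] = std::make_unique<std::vector<uint8_t>>(std::move(data));
    tail_ = (tail_ + 1) % API_MAX_SEND_QUEUE;
    count_++;
    return true;
  }
  std::vector<uint8_t> *front() { return count_ ? buf_[head_].get() : nullptr; }
  void pop() {
    // Caller must check empty() first, matching the tx_buf_count_ > 0 guards above.
    buf_[head_].reset();  // free the fully sent buffer
    head_ = (head_ + 1) % API_MAX_SEND_QUEUE;
    count_--;
  }
  bool empty() const { return count_ == 0; }

 private:
  std::array<std::unique_ptr<std::vector<uint8_t>>, API_MAX_SEND_QUEUE> buf_;
  uint8_t head_{0}, tail_{0}, count_{0};
};
```

The `% API_MAX_SEND_QUEUE` wrap-around and the full-queue check mirror the enqueue path above and the dequeue path in `try_send_tx_buf_()` below.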
// This method writes data to socket or buffers it
|
||||
@@ -140,7 +155,7 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
|
||||
#endif
|
||||
|
||||
// Try to send any existing buffered data first if there is any
|
||||
if (!this->tx_buf_.empty()) {
|
||||
if (this->tx_buf_count_ > 0) {
|
||||
APIError send_result = try_send_tx_buf_();
|
||||
// If real error occurred (not just WOULD_BLOCK), return it
|
||||
if (send_result != APIError::OK && send_result != APIError::WOULD_BLOCK) {
|
||||
@@ -149,7 +164,7 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
|
||||
|
||||
// If there is still data in the buffer, we can't send, buffer
|
||||
// the new data and return
|
||||
if (!this->tx_buf_.empty()) {
|
||||
if (this->tx_buf_count_ > 0) {
|
||||
this->buffer_data_from_iov_(iov, iovcnt, total_write_len, 0);
|
||||
return APIError::OK; // Success, data buffered
|
||||
}
|
||||
@@ -177,32 +192,31 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
|
||||
}
|
||||
|
||||
// Common implementation for trying to send buffered data
|
||||
// IMPORTANT: Caller MUST ensure tx_buf_ is not empty before calling this method
|
||||
// IMPORTANT: Caller MUST ensure tx_buf_count_ > 0 before calling this method
|
||||
APIError APIFrameHelper::try_send_tx_buf_() {
|
||||
// Try to send from tx_buf - we assume it's not empty as it's the caller's responsibility to check
|
||||
bool tx_buf_empty = false;
|
||||
while (!tx_buf_empty) {
|
||||
while (this->tx_buf_count_ > 0) {
|
||||
// Get the first buffer in the queue
|
||||
SendBuffer &front_buffer = this->tx_buf_.front();
|
||||
SendBuffer *front_buffer = this->tx_buf_[this->tx_buf_head_].get();
|
||||
|
||||
// Try to send the remaining data in this buffer
|
||||
ssize_t sent = this->socket_->write(front_buffer.current_data(), front_buffer.remaining());
|
||||
ssize_t sent = this->socket_->write(front_buffer->current_data(), front_buffer->remaining());
|
||||
|
||||
if (sent == -1) {
|
||||
return this->handle_socket_write_error_();
|
||||
} else if (sent == 0) {
|
||||
// Nothing sent but not an error
|
||||
return APIError::WOULD_BLOCK;
|
||||
} else if (static_cast<uint16_t>(sent) < front_buffer.remaining()) {
|
||||
} else if (static_cast<uint16_t>(sent) < front_buffer->remaining()) {
|
||||
// Partially sent, update offset
|
||||
// Cast to ensure no overflow issues with uint16_t
|
||||
front_buffer.offset += static_cast<uint16_t>(sent);
|
||||
front_buffer->offset += static_cast<uint16_t>(sent);
|
||||
return APIError::WOULD_BLOCK; // Stop processing more buffers if we couldn't send a complete buffer
|
||||
} else {
|
||||
// Buffer completely sent, remove it from the queue
|
||||
this->tx_buf_.pop_front();
|
||||
// Update empty status for the loop condition
|
||||
tx_buf_empty = this->tx_buf_.empty();
|
||||
this->tx_buf_[this->tx_buf_head_].reset();
|
||||
this->tx_buf_head_ = (this->tx_buf_head_ + 1) % API_MAX_SEND_QUEUE;
|
||||
this->tx_buf_count_--;
|
||||
// Continue loop to try sending the next buffer
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
#pragma once
|
||||
#include <array>
|
||||
#include <cstdint>
|
||||
#include <deque>
|
||||
#include <limits>
|
||||
#include <memory>
|
||||
#include <span>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
@@ -17,6 +18,17 @@ namespace esphome::api {
|
||||
// uncomment to log raw packets
|
||||
//#define HELPER_LOG_PACKETS
|
||||
|
||||
// Maximum message size limits to prevent OOM on constrained devices
// Handshake messages are limited to a small size for security
static constexpr uint16_t MAX_HANDSHAKE_SIZE = 128;

// Data message limits vary by platform based on available memory
#ifdef USE_ESP8266
static constexpr uint16_t MAX_MESSAGE_SIZE = 8192; // 8 KiB for ESP8266
#else
static constexpr uint16_t MAX_MESSAGE_SIZE = 32768; // 32 KiB for ESP32 and other platforms
#endif
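A short sketch of how these limits are intended to be consumed; the `State` enum and wrapper function here are illustrative, and the real check is the one added to `APINoiseFrameHelper::try_read_frame_()` further down in this diff:

```cpp
#include <cstdint>

static constexpr uint16_t MAX_HANDSHAKE_SIZE = 128;
static constexpr uint16_t MAX_MESSAGE_SIZE = 32768;  // non-ESP8266 value from above

enum class State { HANDSHAKE, DATA };

// Returns true if the advertised frame length is acceptable for the current phase:
// small bound during the handshake, platform-sized bound once in the DATA state.
bool frame_size_ok(State state, uint16_t msg_size) {
  uint16_t limit = (state == State::DATA) ? MAX_MESSAGE_SIZE : MAX_HANDSHAKE_SIZE;
  return msg_size <= limit;
}
```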
// Forward declaration
|
||||
struct ClientInfo;
|
||||
|
||||
@@ -79,7 +91,7 @@ class APIFrameHelper {
|
||||
virtual APIError init() = 0;
|
||||
virtual APIError loop();
|
||||
virtual APIError read_packet(ReadPacketBuffer *buffer) = 0;
|
||||
bool can_write_without_blocking() { return state_ == State::DATA && tx_buf_.empty(); }
|
||||
bool can_write_without_blocking() { return this->state_ == State::DATA && this->tx_buf_count_ == 0; }
|
||||
std::string getpeername() { return socket_->getpeername(); }
|
||||
int getpeername(struct sockaddr *addr, socklen_t *addrlen) { return socket_->getpeername(addr, addrlen); }
|
||||
APIError close() {
|
||||
@@ -161,7 +173,7 @@ class APIFrameHelper {
|
||||
};
|
||||
|
||||
// Containers (size varies, but typically 12+ bytes on 32-bit)
|
||||
std::deque<SendBuffer> tx_buf_;
|
||||
std::array<std::unique_ptr<SendBuffer>, API_MAX_SEND_QUEUE> tx_buf_;
|
||||
std::vector<struct iovec> reusable_iovs_;
|
||||
std::vector<uint8_t> rx_buf_;
|
||||
|
||||
@@ -174,7 +186,10 @@ class APIFrameHelper {
|
||||
State state_{State::INITIALIZE};
|
||||
uint8_t frame_header_padding_{0};
|
||||
uint8_t frame_footer_size_{0};
|
||||
// 5 bytes total, 3 bytes padding
|
||||
uint8_t tx_buf_head_{0};
|
||||
uint8_t tx_buf_tail_{0};
|
||||
uint8_t tx_buf_count_{0};
|
||||
// 8 bytes total, 0 bytes padding
|
||||
|
||||
// Common initialization for both plaintext and noise protocols
|
||||
APIError init_common_();
|
||||
|
||||
@@ -24,7 +24,8 @@ static const char *const PROLOGUE_INIT = "NoiseAPIInit";
|
||||
#endif
|
||||
static constexpr size_t PROLOGUE_INIT_LEN = 12; // strlen("NoiseAPIInit")
|
||||
|
||||
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
|
||||
#define HELPER_LOG(msg, ...) \
|
||||
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), this->client_info_->peername.c_str(), ##__VA_ARGS__)
|
||||
|
||||
#ifdef HELPER_LOG_PACKETS
|
||||
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
|
||||
@@ -131,26 +132,16 @@ APIError APINoiseFrameHelper::loop() {
|
||||
return APIFrameHelper::loop();
|
||||
}
|
||||
|
||||
/** Read a packet into the rx_buf_. If successful, stores frame data in the frame parameter
|
||||
/** Read a packet into the rx_buf_.
|
||||
*
|
||||
* @param frame: The struct to hold the frame information in.
|
||||
* msg_start: points to the start of the payload - this pointer is only valid until the next
|
||||
* try_receive_raw_ call
|
||||
*
|
||||
* @return 0 if a full packet is in rx_buf_
|
||||
* @return -1 if error, check errno.
|
||||
* @return APIError::OK if a full packet is in rx_buf_
|
||||
*
|
||||
* errno EWOULDBLOCK: Packet could not be read without blocking. Try again later.
|
||||
* errno ENOMEM: Not enough memory for reading packet.
|
||||
* errno API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
|
||||
* errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
|
||||
*/
|
||||
APIError APINoiseFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
if (frame == nullptr) {
|
||||
HELPER_LOG("Bad argument for try_read_frame_");
|
||||
return APIError::BAD_ARG;
|
||||
}
|
||||
|
||||
APIError APINoiseFrameHelper::try_read_frame_() {
|
||||
// read header
|
||||
if (rx_header_buf_len_ < 3) {
|
||||
// no header information yet
|
||||
@@ -177,16 +168,17 @@ APIError APINoiseFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
// read body
|
||||
uint16_t msg_size = (((uint16_t) rx_header_buf_[1]) << 8) | rx_header_buf_[2];
|
||||
|
||||
if (state_ != State::DATA && msg_size > 128) {
|
||||
// for handshake message only permit up to 128 bytes
|
||||
// Check against size limits to prevent OOM: MAX_HANDSHAKE_SIZE for handshake, MAX_MESSAGE_SIZE for data
|
||||
uint16_t limit = (state_ == State::DATA) ? MAX_MESSAGE_SIZE : MAX_HANDSHAKE_SIZE;
|
||||
if (msg_size > limit) {
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Bad packet len for handshake: %d", msg_size);
|
||||
return APIError::BAD_HANDSHAKE_PACKET_LEN;
|
||||
HELPER_LOG("Bad packet: message size %u exceeds maximum %u", msg_size, limit);
|
||||
return (state_ == State::DATA) ? APIError::BAD_DATA_PACKET : APIError::BAD_HANDSHAKE_PACKET_LEN;
|
||||
}
|
||||
|
||||
// reserve space for body
|
||||
if (rx_buf_.size() != msg_size) {
|
||||
rx_buf_.resize(msg_size);
|
||||
// Reserve space for body
|
||||
if (this->rx_buf_.size() != msg_size) {
|
||||
this->rx_buf_.resize(msg_size);
|
||||
}
|
||||
|
||||
if (rx_buf_len_ < msg_size) {
|
||||
@@ -204,12 +196,12 @@ APIError APINoiseFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
}
|
||||
}
|
||||
|
||||
LOG_PACKET_RECEIVED(rx_buf_);
|
||||
*frame = std::move(rx_buf_);
|
||||
// consume msg
|
||||
rx_buf_ = {};
|
||||
rx_buf_len_ = 0;
|
||||
rx_header_buf_len_ = 0;
|
||||
LOG_PACKET_RECEIVED(this->rx_buf_);
|
||||
|
||||
// Clear state for next frame (rx_buf_ still contains data for caller)
|
||||
this->rx_buf_len_ = 0;
|
||||
this->rx_header_buf_len_ = 0;
|
||||
|
||||
return APIError::OK;
|
||||
}
|
||||
|
||||
@@ -231,18 +223,17 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
}
|
||||
if (state_ == State::CLIENT_HELLO) {
|
||||
// waiting for client hello
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
aerr = this->try_read_frame_();
|
||||
if (aerr != APIError::OK) {
|
||||
return handle_handshake_frame_error_(aerr);
|
||||
}
|
||||
// ignore contents, may be used in future for flags
|
||||
// Resize for: existing prologue + 2 size bytes + frame data
|
||||
size_t old_size = prologue_.size();
|
||||
prologue_.resize(old_size + 2 + frame.size());
|
||||
prologue_[old_size] = (uint8_t) (frame.size() >> 8);
|
||||
prologue_[old_size + 1] = (uint8_t) frame.size();
|
||||
std::memcpy(prologue_.data() + old_size + 2, frame.data(), frame.size());
|
||||
size_t old_size = this->prologue_.size();
|
||||
this->prologue_.resize(old_size + 2 + this->rx_buf_.size());
|
||||
this->prologue_[old_size] = (uint8_t) (this->rx_buf_.size() >> 8);
|
||||
this->prologue_[old_size + 1] = (uint8_t) this->rx_buf_.size();
|
||||
std::memcpy(this->prologue_.data() + old_size + 2, this->rx_buf_.data(), this->rx_buf_.size());
|
||||
|
||||
state_ = State::SERVER_HELLO;
|
||||
}
|
||||
@@ -251,7 +242,6 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
const std::string &name = App.get_name();
|
||||
const std::string &mac = get_mac_address();
|
||||
|
||||
std::vector<uint8_t> msg;
|
||||
// Calculate positions and sizes
|
||||
size_t name_len = name.size() + 1; // including null terminator
|
||||
size_t mac_len = mac.size() + 1; // including null terminator
|
||||
@@ -259,17 +249,17 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
size_t mac_offset = name_offset + name_len;
|
||||
size_t total_size = 1 + name_len + mac_len;
|
||||
|
||||
msg.resize(total_size);
|
||||
auto msg = std::make_unique<uint8_t[]>(total_size);
|
||||
|
||||
// chosen proto
|
||||
msg[0] = 0x01;
|
||||
|
||||
// node name, terminated by null byte
|
||||
std::memcpy(msg.data() + name_offset, name.c_str(), name_len);
|
||||
std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
|
||||
// node mac, terminated by null byte
|
||||
std::memcpy(msg.data() + mac_offset, mac.c_str(), mac_len);
|
||||
std::memcpy(msg.get() + mac_offset, mac.c_str(), mac_len);
|
||||
|
||||
aerr = write_frame_(msg.data(), msg.size());
|
||||
aerr = write_frame_(msg.get(), total_size);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
@@ -284,24 +274,23 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
int action = noise_handshakestate_get_action(handshake_);
|
||||
if (action == NOISE_ACTION_READ_MESSAGE) {
|
||||
// waiting for handshake msg
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
aerr = this->try_read_frame_();
|
||||
if (aerr != APIError::OK) {
|
||||
return handle_handshake_frame_error_(aerr);
|
||||
}
|
||||
|
||||
if (frame.empty()) {
|
||||
if (this->rx_buf_.empty()) {
|
||||
send_explicit_handshake_reject_(LOG_STR("Empty handshake message"));
|
||||
return APIError::BAD_HANDSHAKE_ERROR_BYTE;
|
||||
} else if (frame[0] != 0x00) {
|
||||
HELPER_LOG("Bad handshake error byte: %u", frame[0]);
|
||||
} else if (this->rx_buf_[0] != 0x00) {
|
||||
HELPER_LOG("Bad handshake error byte: %u", this->rx_buf_[0]);
|
||||
send_explicit_handshake_reject_(LOG_STR("Bad handshake error byte"));
|
||||
return APIError::BAD_HANDSHAKE_ERROR_BYTE;
|
||||
}
|
||||
|
||||
NoiseBuffer mbuf;
|
||||
noise_buffer_init(mbuf);
|
||||
noise_buffer_set_input(mbuf, frame.data() + 1, frame.size() - 1);
|
||||
noise_buffer_set_input(mbuf, this->rx_buf_.data() + 1, this->rx_buf_.size() - 1);
|
||||
err = noise_handshakestate_read_message(handshake_, &mbuf, nullptr);
|
||||
if (err != 0) {
|
||||
// Special handling for MAC failure
|
||||
@@ -349,64 +338,62 @@ void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reaso
|
||||
#ifdef USE_STORE_LOG_STR_IN_FLASH
|
||||
// On ESP8266 with flash strings, we need to use PROGMEM-aware functions
|
||||
size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
|
||||
std::vector<uint8_t> data;
|
||||
data.resize(reason_len + 1);
|
||||
size_t data_size = reason_len + 1;
|
||||
auto data = std::make_unique<uint8_t[]>(data_size);
|
||||
data[0] = 0x01; // failure
|
||||
|
||||
// Copy error message from PROGMEM
|
||||
if (reason_len > 0) {
|
||||
memcpy_P(data.data() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
|
||||
memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
|
||||
}
|
||||
#else
|
||||
// Normal memory access
|
||||
const char *reason_str = LOG_STR_ARG(reason);
|
||||
size_t reason_len = strlen(reason_str);
|
||||
std::vector<uint8_t> data;
|
||||
data.resize(reason_len + 1);
|
||||
size_t data_size = reason_len + 1;
|
||||
auto data = std::make_unique<uint8_t[]>(data_size);
|
||||
data[0] = 0x01; // failure
|
||||
|
||||
// Copy error message in bulk
|
||||
if (reason_len > 0) {
|
||||
std::memcpy(data.data() + 1, reason_str, reason_len);
|
||||
std::memcpy(data.get() + 1, reason_str, reason_len);
|
||||
}
|
||||
#endif
|
||||
|
||||
// temporarily remove failed state
|
||||
auto orig_state = state_;
|
||||
state_ = State::EXPLICIT_REJECT;
|
||||
write_frame_(data.data(), data.size());
|
||||
write_frame_(data.get(), data_size);
|
||||
state_ = orig_state;
|
||||
}
|
||||
APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
int err;
|
||||
APIError aerr;
|
||||
aerr = state_action_();
|
||||
APIError aerr = this->state_action_();
|
||||
if (aerr != APIError::OK) {
|
||||
return aerr;
|
||||
}
|
||||
|
||||
if (state_ != State::DATA) {
|
||||
if (this->state_ != State::DATA) {
|
||||
return APIError::WOULD_BLOCK;
|
||||
}
|
||||
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
aerr = this->try_read_frame_();
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
NoiseBuffer mbuf;
|
||||
noise_buffer_init(mbuf);
|
||||
noise_buffer_set_inout(mbuf, frame.data(), frame.size(), frame.size());
|
||||
err = noise_cipherstate_decrypt(recv_cipher_, &mbuf);
|
||||
noise_buffer_set_inout(mbuf, this->rx_buf_.data(), this->rx_buf_.size(), this->rx_buf_.size());
|
||||
int err = noise_cipherstate_decrypt(this->recv_cipher_, &mbuf);
|
||||
APIError decrypt_err =
|
||||
handle_noise_error_(err, LOG_STR("noise_cipherstate_decrypt"), APIError::CIPHERSTATE_DECRYPT_FAILED);
|
||||
if (decrypt_err != APIError::OK)
|
||||
if (decrypt_err != APIError::OK) {
|
||||
return decrypt_err;
|
||||
}
|
||||
|
||||
uint16_t msg_size = mbuf.size;
|
||||
uint8_t *msg_data = frame.data();
|
||||
uint8_t *msg_data = this->rx_buf_.data();
|
||||
if (msg_size < 4) {
|
||||
state_ = State::FAILED;
|
||||
this->state_ = State::FAILED;
|
||||
HELPER_LOG("Bad data packet: size %d too short", msg_size);
|
||||
return APIError::BAD_DATA_PACKET;
|
||||
}
|
||||
@@ -414,12 +401,12 @@ APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
uint16_t type = (((uint16_t) msg_data[0]) << 8) | msg_data[1];
|
||||
uint16_t data_len = (((uint16_t) msg_data[2]) << 8) | msg_data[3];
|
||||
if (data_len > msg_size - 4) {
|
||||
state_ = State::FAILED;
|
||||
this->state_ = State::FAILED;
|
||||
HELPER_LOG("Bad data packet: data_len %u greater than msg_size %u", data_len, msg_size);
|
||||
return APIError::BAD_DATA_PACKET;
|
||||
}
|
||||
|
||||
buffer->container = std::move(frame);
|
||||
buffer->container = std::move(this->rx_buf_);
|
||||
buffer->data_offset = 4;
|
||||
buffer->data_len = data_len;
|
||||
buffer->type = type;
|
||||
|
||||
@@ -28,7 +28,7 @@ class APINoiseFrameHelper final : public APIFrameHelper {
|
||||
|
||||
protected:
|
||||
APIError state_action_();
|
||||
APIError try_read_frame_(std::vector<uint8_t> *frame);
|
||||
APIError try_read_frame_();
|
||||
APIError write_frame_(const uint8_t *data, uint16_t len);
|
||||
APIError init_handshake_();
|
||||
APIError check_handshake_finished_();
|
||||
|
||||
@@ -18,7 +18,8 @@ namespace esphome::api {
|
||||
|
||||
static const char *const TAG = "api.plaintext";
|
||||
|
||||
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
|
||||
#define HELPER_LOG(msg, ...) \
|
||||
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), this->client_info_->peername.c_str(), ##__VA_ARGS__)
|
||||
|
||||
#ifdef HELPER_LOG_PACKETS
|
||||
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
|
||||
@@ -46,21 +47,13 @@ APIError APIPlaintextFrameHelper::loop() {
|
||||
return APIFrameHelper::loop();
|
||||
}
|
||||
|
||||
/** Read a packet into the rx_buf_. If successful, stores frame data in the frame parameter
|
||||
*
|
||||
* @param frame: The struct to hold the frame information in.
|
||||
* msg: store the parsed frame in that struct
|
||||
/** Read a packet into the rx_buf_.
|
||||
*
|
||||
* @return See APIError
|
||||
*
|
||||
* error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
|
||||
*/
|
||||
APIError APIPlaintextFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
if (frame == nullptr) {
|
||||
HELPER_LOG("Bad argument for try_read_frame_");
|
||||
return APIError::BAD_ARG;
|
||||
}
|
||||
|
||||
APIError APIPlaintextFrameHelper::try_read_frame_() {
|
||||
// read header
|
||||
while (!rx_header_parsed_) {
|
||||
// Now that we know when the socket is ready, we can read up to 3 bytes
|
||||
@@ -122,10 +115,10 @@ APIError APIPlaintextFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (msg_size_varint->as_uint32() > std::numeric_limits<uint16_t>::max()) {
|
||||
if (msg_size_varint->as_uint32() > MAX_MESSAGE_SIZE) {
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Bad packet: message size %" PRIu32 " exceeds maximum %u", msg_size_varint->as_uint32(),
|
||||
std::numeric_limits<uint16_t>::max());
|
||||
MAX_MESSAGE_SIZE);
|
||||
return APIError::BAD_DATA_PACKET;
|
||||
}
|
||||
rx_header_parsed_len_ = msg_size_varint->as_uint16();
|
||||
@@ -149,9 +142,9 @@ APIError APIPlaintextFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
}
|
||||
// header reading done
|
||||
|
||||
// reserve space for body
|
||||
if (rx_buf_.size() != rx_header_parsed_len_) {
|
||||
rx_buf_.resize(rx_header_parsed_len_);
|
||||
// Reserve space for body
|
||||
if (this->rx_buf_.size() != this->rx_header_parsed_len_) {
|
||||
this->rx_buf_.resize(this->rx_header_parsed_len_);
|
||||
}
|
||||
|
||||
if (rx_buf_len_ < rx_header_parsed_len_) {
|
||||
@@ -169,24 +162,22 @@ APIError APIPlaintextFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
}
|
||||
}
|
||||
|
||||
LOG_PACKET_RECEIVED(rx_buf_);
|
||||
*frame = std::move(rx_buf_);
|
||||
// consume msg
|
||||
rx_buf_ = {};
|
||||
rx_buf_len_ = 0;
|
||||
rx_header_buf_pos_ = 0;
|
||||
rx_header_parsed_ = false;
|
||||
LOG_PACKET_RECEIVED(this->rx_buf_);
|
||||
|
||||
// Clear state for next frame (rx_buf_ still contains data for caller)
|
||||
this->rx_buf_len_ = 0;
|
||||
this->rx_header_buf_pos_ = 0;
|
||||
this->rx_header_parsed_ = false;
|
||||
|
||||
return APIError::OK;
|
||||
}
|
||||
APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
APIError aerr;
|
||||
|
||||
if (state_ != State::DATA) {
|
||||
APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
if (this->state_ != State::DATA) {
|
||||
return APIError::WOULD_BLOCK;
|
||||
}
|
||||
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
APIError aerr = this->try_read_frame_();
|
||||
if (aerr != APIError::OK) {
|
||||
if (aerr == APIError::BAD_INDICATOR) {
|
||||
// Make sure to tell the remote that we don't
|
||||
@@ -219,10 +210,10 @@ APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
return aerr;
|
||||
}

buffer->container = std::move(frame);
buffer->container = std::move(this->rx_buf_);
buffer->data_offset = 0;
buffer->data_len = rx_header_parsed_len_;
buffer->type = rx_header_parsed_type_;
buffer->data_len = this->rx_header_parsed_len_;
buffer->type = this->rx_header_parsed_type_;
return APIError::OK;
}
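A sketch of the ownership hand-off this change introduces: the frame helpers now leave the payload in `rx_buf_` and `read_packet()` moves that vector into the caller's buffer instead of copying it out. `PacketView` below is a simplified stand-in for `ReadPacketBuffer`, limited to the fields used in this diff:

```cpp
#include <cstdint>
#include <utility>
#include <vector>

struct PacketView {
  std::vector<uint8_t> container;  // owns the frame bytes after the move
  uint16_t data_offset{0};         // 0 for plaintext, 4 for noise (type + length header)
  uint16_t data_len{0};
  uint16_t type{0};
};

// After a successful try_read_frame_(), the helper's rx_buf_ is handed to the caller by
// move (no per-packet copy) and the parse state is reset, as in "Clear state for next frame".
PacketView take_packet(std::vector<uint8_t> &rx_buf, uint16_t len, uint16_t type) {
  PacketView out;
  out.container = std::move(rx_buf);
  out.data_len = len;
  out.type = type;
  return out;
}
```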
APIError APIPlaintextFrameHelper::write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) {
|
||||
|
||||
@@ -24,7 +24,7 @@ class APIPlaintextFrameHelper final : public APIFrameHelper {
|
||||
APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override;
|
||||
|
||||
protected:
|
||||
APIError try_read_frame_(std::vector<uint8_t> *frame);
|
||||
APIError try_read_frame_();
|
||||
|
||||
// Group 2-byte aligned types
|
||||
uint16_t rx_header_parsed_type_ = 0;
|
||||
|
||||
@@ -32,6 +32,13 @@ extend google.protobuf.FieldOptions {
|
||||
optional string fixed_array_size_define = 50010;
|
||||
optional string fixed_array_with_length_define = 50011;
|
||||
|
||||
// pointer_to_buffer: Use pointer instead of array for fixed-size byte fields
|
||||
// When set, the field will be declared as a pointer (const uint8_t *data)
|
||||
// instead of an array (uint8_t data[N]). This allows zero-copy on decode
|
||||
// by pointing directly to the protobuf buffer. The buffer must remain valid
|
||||
// until the message is processed (which is guaranteed for stack-allocated messages).
|
||||
optional bool pointer_to_buffer = 50012 [default=false];
|
||||
|
||||
// container_pointer: Zero-copy optimization for repeated fields.
|
||||
//
|
||||
// When container_pointer is set on a repeated field, the generated message will
|
||||
@@ -57,4 +64,20 @@ extend google.protobuf.FieldOptions {
|
||||
// This is typically done through methods returning const T& or special accessor
|
||||
// methods like get_options() or supported_modes_for_api_().
|
||||
optional string container_pointer = 50001;
|
||||
|
||||
// fixed_vector: Use FixedVector instead of std::vector for repeated fields
|
||||
// When set, the repeated field will use FixedVector<T> which requires calling
|
||||
// init(size) before adding elements. This eliminates std::vector template overhead
|
||||
// and is ideal when the exact size is known before populating the array.
|
||||
optional bool fixed_vector = 50013 [default=false];

// container_pointer_no_template: Use a non-template container type for repeated fields
// Similar to container_pointer, but for containers that don't take template parameters.
// The container type is used as-is without appending element type.
// The container must have:
// - begin() and end() methods returning iterators
// - empty() method
// Example: [(container_pointer_no_template) = "light::ColorModeMask"]
// generates: const light::ColorModeMask *supported_color_modes{};
optional string container_pointer_no_template = 50014;
}
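For orientation, a hedged sketch of the generated C++ shapes these options produce, matching the declarations that appear in the api_pb2.h hunks later in this diff; `FakeRequest` itself is hypothetical and `FixedVector` is only forward-declared here for illustration:

```cpp
#include <cstdint>

template<typename T> class FixedVector;  // ESPHome container referenced by (fixed_vector)

class FakeRequest {
 public:
  // (pointer_to_buffer) = true  ->  zero-copy view into the decode buffer
  const uint8_t *data{nullptr};
  uint16_t data_len{0};

  // (fixed_vector) = true  ->  FixedVector<T>, which must be init(size)'d before elements
  // are added, e.g.:
  //   FixedVector<SomeSubMessage> entries{};
};
```

The two-pass decode that sizes a `FixedVector` (`count_repeated_field()`, then `init()`, then `decode()`) appears further down in `ExecuteServiceArgument::decode()` and `ExecuteServiceRequest::decode()`.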
@@ -22,9 +22,12 @@ bool HelloRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
|
||||
}
|
||||
bool HelloRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 1:
|
||||
this->client_info = value.as_string();
|
||||
case 1: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->client_info = value.data();
|
||||
this->client_info_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
@@ -42,18 +45,23 @@ void HelloResponse::calculate_size(ProtoSize &size) const {
|
||||
size.add_length(1, this->server_info_ref_.size());
|
||||
size.add_length(1, this->name_ref_.size());
|
||||
}
|
||||
bool ConnectRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
#ifdef USE_API_PASSWORD
|
||||
bool AuthenticationRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 1:
|
||||
this->password = value.as_string();
|
||||
case 1: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->password = value.data();
|
||||
this->password_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
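With the field now exposed as a raw pointer plus length, consumers must compare against the explicit length rather than treating it as a terminated string. A minimal sketch (`check_password_sketch` is illustrative; the connection code above calls `this->parent_->check_password(msg.password, msg.password_len)`):

```cpp
#include <cstdint>
#include <cstring>
#include <string>

bool check_password_sketch(const std::string &expected, const uint8_t *data, uint16_t len) {
  // The field points into the received protobuf buffer and is NOT null-terminated,
  // so only the explicit length is trustworthy.
  return expected.size() == len && (len == 0 || std::memcmp(expected.data(), data, len) == 0);
}
```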
void ConnectResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_bool(1, this->invalid_password); }
|
||||
void ConnectResponse::calculate_size(ProtoSize &size) const { size.add_bool(1, this->invalid_password); }
|
||||
void AuthenticationResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_bool(1, this->invalid_password); }
|
||||
void AuthenticationResponse::calculate_size(ProtoSize &size) const { size.add_bool(1, this->invalid_password); }
|
||||
#endif
|
||||
#ifdef USE_AREAS
|
||||
void AreaInfo::encode(ProtoWriteBuffer buffer) const {
|
||||
buffer.encode_uint32(1, this->area_id);
|
||||
@@ -127,6 +135,12 @@ void DeviceInfoResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
#ifdef USE_AREAS
|
||||
buffer.encode_message(22, this->area);
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
buffer.encode_uint32(23, this->zwave_proxy_feature_flags);
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
buffer.encode_uint32(24, this->zwave_home_id);
|
||||
#endif
|
||||
}
|
||||
void DeviceInfoResponse::calculate_size(ProtoSize &size) const {
|
||||
#ifdef USE_API_PASSWORD
|
||||
@@ -179,6 +193,12 @@ void DeviceInfoResponse::calculate_size(ProtoSize &size) const {
|
||||
#ifdef USE_AREAS
|
||||
size.add_message_object(2, this->area);
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
size.add_uint32(2, this->zwave_proxy_feature_flags);
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
size.add_uint32(2, this->zwave_home_id);
|
||||
#endif
|
||||
}
|
||||
#ifdef USE_BINARY_SENSOR
|
||||
void ListEntitiesBinarySensorResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
@@ -852,7 +872,7 @@ void HomeassistantServiceMap::calculate_size(ProtoSize &size) const {
|
||||
size.add_length(1, this->key_ref_.size());
|
||||
size.add_length(1, this->value.size());
|
||||
}
|
||||
void HomeassistantServiceResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
void HomeassistantActionRequest::encode(ProtoWriteBuffer buffer) const {
|
||||
buffer.encode_string(1, this->service_ref_);
|
||||
for (auto &it : this->data) {
|
||||
buffer.encode_message(2, it, true);
|
||||
@@ -864,13 +884,64 @@ void HomeassistantServiceResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
buffer.encode_message(4, it, true);
|
||||
}
|
||||
buffer.encode_bool(5, this->is_event);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
buffer.encode_uint32(6, this->call_id);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
buffer.encode_bool(7, this->wants_response);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
buffer.encode_string(8, this->response_template);
|
||||
#endif
|
||||
}
|
||||
void HomeassistantServiceResponse::calculate_size(ProtoSize &size) const {
|
||||
void HomeassistantActionRequest::calculate_size(ProtoSize &size) const {
|
||||
size.add_length(1, this->service_ref_.size());
|
||||
size.add_repeated_message(1, this->data);
|
||||
size.add_repeated_message(1, this->data_template);
|
||||
size.add_repeated_message(1, this->variables);
|
||||
size.add_bool(1, this->is_event);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
size.add_uint32(1, this->call_id);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
size.add_bool(1, this->wants_response);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
size.add_length(1, this->response_template.size());
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
bool HomeassistantActionResponse::decode_varint(uint32_t field_id, ProtoVarInt value) {
|
||||
switch (field_id) {
|
||||
case 1:
|
||||
this->call_id = value.as_uint32();
|
||||
break;
|
||||
case 2:
|
||||
this->success = value.as_bool();
|
||||
break;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool HomeassistantActionResponse::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 3:
|
||||
this->error_message = value.as_string();
|
||||
break;
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
case 4: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->response_data = value.data();
|
||||
this->response_data_len = value.size();
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
@@ -903,9 +974,12 @@ bool HomeAssistantStateResponse::decode_length(uint32_t field_id, ProtoLengthDel
|
||||
#endif
|
||||
bool GetTimeResponse::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 2:
|
||||
this->timezone = value.as_string();
|
||||
case 2: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->timezone = value.data();
|
||||
this->timezone_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
@@ -990,6 +1064,17 @@ bool ExecuteServiceArgument::decode_32bit(uint32_t field_id, Proto32Bit value) {
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void ExecuteServiceArgument::decode(const uint8_t *buffer, size_t length) {
|
||||
uint32_t count_bool_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 6);
|
||||
this->bool_array.init(count_bool_array);
|
||||
uint32_t count_int_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 7);
|
||||
this->int_array.init(count_int_array);
|
||||
uint32_t count_float_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 8);
|
||||
this->float_array.init(count_float_array);
|
||||
uint32_t count_string_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 9);
|
||||
this->string_array.init(count_string_array);
|
||||
ProtoDecodableMessage::decode(buffer, length);
|
||||
}
|
||||
bool ExecuteServiceRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 2:
|
||||
@@ -1011,6 +1096,11 @@ bool ExecuteServiceRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void ExecuteServiceRequest::decode(const uint8_t *buffer, size_t length) {
|
||||
uint32_t count_args = ProtoDecodableMessage::count_repeated_field(buffer, length, 2);
|
||||
this->args.init(count_args);
|
||||
ProtoDecodableMessage::decode(buffer, length);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
void ListEntitiesCameraResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
@@ -1111,6 +1201,7 @@ void ListEntitiesClimateResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
#ifdef USE_DEVICES
|
||||
buffer.encode_uint32(26, this->device_id);
|
||||
#endif
|
||||
buffer.encode_uint32(27, this->feature_flags);
|
||||
}
|
||||
void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
|
||||
size.add_length(1, this->object_id_ref_.size());
|
||||
@@ -1165,6 +1256,7 @@ void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
|
||||
#ifdef USE_DEVICES
|
||||
size.add_uint32(2, this->device_id);
|
||||
#endif
|
||||
size.add_uint32(2, this->feature_flags);
|
||||
}
|
||||
void ClimateStateResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
buffer.encode_fixed32(1, this->key);
|
||||
@@ -2014,9 +2106,12 @@ bool BluetoothGATTWriteRequest::decode_varint(uint32_t field_id, ProtoVarInt val
|
||||
}
|
||||
bool BluetoothGATTWriteRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 4:
|
||||
this->data = value.as_string();
|
||||
case 4: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->data = value.data();
|
||||
this->data_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
@@ -2050,9 +2145,12 @@ bool BluetoothGATTWriteDescriptorRequest::decode_varint(uint32_t field_id, Proto
|
||||
}
|
||||
bool BluetoothGATTWriteDescriptorRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 3:
|
||||
this->data = value.as_string();
|
||||
case 3: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->data = value.data();
|
||||
this->data_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
@@ -2368,6 +2466,52 @@ void VoiceAssistantWakeWord::calculate_size(ProtoSize &size) const {
|
||||
}
|
||||
}
|
||||
}
|
||||
bool VoiceAssistantExternalWakeWord::decode_varint(uint32_t field_id, ProtoVarInt value) {
|
||||
switch (field_id) {
|
||||
case 5:
|
||||
this->model_size = value.as_uint32();
|
||||
break;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool VoiceAssistantExternalWakeWord::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 1:
|
||||
this->id = value.as_string();
|
||||
break;
|
||||
case 2:
|
||||
this->wake_word = value.as_string();
|
||||
break;
|
||||
case 3:
|
||||
this->trained_languages.push_back(value.as_string());
|
||||
break;
|
||||
case 4:
|
||||
this->model_type = value.as_string();
|
||||
break;
|
||||
case 6:
|
||||
this->model_hash = value.as_string();
|
||||
break;
|
||||
case 7:
|
||||
this->url = value.as_string();
|
||||
break;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool VoiceAssistantConfigurationRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 1:
|
||||
this->external_wake_words.emplace_back();
|
||||
value.decode_to_message(this->external_wake_words.back());
|
||||
break;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void VoiceAssistantConfigurationResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
for (auto &it : this->available_wake_words) {
|
||||
buffer.encode_message(1, it, true);
|
||||
@@ -3011,5 +3155,53 @@ bool UpdateCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
bool ZWaveProxyFrame::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 1: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->data = value.data();
|
||||
this->data_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void ZWaveProxyFrame::encode(ProtoWriteBuffer buffer) const { buffer.encode_bytes(1, this->data, this->data_len); }
|
||||
void ZWaveProxyFrame::calculate_size(ProtoSize &size) const { size.add_length(1, this->data_len); }
|
||||
bool ZWaveProxyRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
|
||||
switch (field_id) {
|
||||
case 1:
|
||||
this->type = static_cast<enums::ZWaveProxyRequestType>(value.as_uint32());
|
||||
break;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool ZWaveProxyRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 2: {
|
||||
// Use raw data directly to avoid allocation
|
||||
this->data = value.data();
|
||||
this->data_len = value.size();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void ZWaveProxyRequest::encode(ProtoWriteBuffer buffer) const {
|
||||
buffer.encode_uint32(1, static_cast<uint32_t>(this->type));
|
||||
buffer.encode_bytes(2, this->data, this->data_len);
|
||||
}
|
||||
void ZWaveProxyRequest::calculate_size(ProtoSize &size) const {
|
||||
size.add_uint32(1, static_cast<uint32_t>(this->type));
|
||||
size.add_length(2, this->data_len);
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -276,6 +276,13 @@ enum UpdateCommand : uint32_t {
|
||||
UPDATE_COMMAND_CHECK = 2,
|
||||
};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
enum ZWaveProxyRequestType : uint32_t {
|
||||
ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE = 0,
|
||||
ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE = 1,
|
||||
ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE = 2,
|
||||
};
|
||||
#endif
|
||||
|
||||
} // namespace enums
|
||||
|
||||
@@ -324,11 +331,12 @@ class CommandProtoMessage : public ProtoDecodableMessage {
|
||||
class HelloRequest final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 1;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 17;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 27;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "hello_request"; }
|
||||
#endif
|
||||
std::string client_info{};
|
||||
const uint8_t *client_info{nullptr};
|
||||
uint16_t client_info_len{0};
|
||||
uint32_t api_version_major{0};
|
||||
uint32_t api_version_minor{0};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
@@ -360,14 +368,16 @@ class HelloResponse final : public ProtoMessage {
|
||||
|
||||
protected:
|
||||
};
|
||||
class ConnectRequest final : public ProtoDecodableMessage {
|
||||
#ifdef USE_API_PASSWORD
|
||||
class AuthenticationRequest final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 3;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 9;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 19;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "connect_request"; }
|
||||
const char *message_name() const override { return "authentication_request"; }
|
||||
#endif
|
||||
std::string password{};
|
||||
const uint8_t *password{nullptr};
|
||||
uint16_t password_len{0};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -375,12 +385,12 @@ class ConnectRequest final : public ProtoDecodableMessage {
|
||||
protected:
|
||||
bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
|
||||
};
|
||||
class ConnectResponse final : public ProtoMessage {
|
||||
class AuthenticationResponse final : public ProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 4;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 2;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "connect_response"; }
|
||||
const char *message_name() const override { return "authentication_response"; }
|
||||
#endif
|
||||
bool invalid_password{false};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
@@ -391,6 +401,7 @@ class ConnectResponse final : public ProtoMessage {
|
||||
|
||||
protected:
|
||||
};
|
||||
#endif
|
||||
class DisconnectRequest final : public ProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 5;
|
||||
@@ -490,7 +501,7 @@ class DeviceInfo final : public ProtoMessage {
|
||||
class DeviceInfoResponse final : public ProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 10;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 247;
|
||||
static constexpr uint16_t ESTIMATED_SIZE = 257;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "device_info_response"; }
|
||||
#endif
|
||||
@@ -550,6 +561,12 @@ class DeviceInfoResponse final : public ProtoMessage {
|
||||
#endif
|
||||
#ifdef USE_AREAS
|
||||
AreaInfo area{};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
uint32_t zwave_proxy_feature_flags{0};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
uint32_t zwave_home_id{0};
|
||||
#endif
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
@@ -773,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage {
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "list_entities_light_response"; }
|
||||
#endif
|
||||
const std::set<light::ColorMode> *supported_color_modes{};
|
||||
const light::ColorModeMask *supported_color_modes{};
|
||||
float min_mireds{0.0f};
|
||||
float max_mireds{0.0f};
|
||||
std::vector<std::string> effects{};
|
||||
@@ -1084,19 +1101,28 @@ class HomeassistantServiceMap final : public ProtoMessage {
|
||||
|
||||
protected:
|
||||
};
|
||||
class HomeassistantServiceResponse final : public ProtoMessage {
|
||||
class HomeassistantActionRequest final : public ProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 35;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 113;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 128;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "homeassistant_service_response"; }
|
||||
const char *message_name() const override { return "homeassistant_action_request"; }
|
||||
#endif
|
||||
StringRef service_ref_{};
|
||||
void set_service(const StringRef &ref) { this->service_ref_ = ref; }
|
||||
std::vector<HomeassistantServiceMap> data{};
|
||||
std::vector<HomeassistantServiceMap> data_template{};
|
||||
std::vector<HomeassistantServiceMap> variables{};
|
||||
FixedVector<HomeassistantServiceMap> data{};
|
||||
FixedVector<HomeassistantServiceMap> data_template{};
|
||||
FixedVector<HomeassistantServiceMap> variables{};
|
||||
bool is_event{false};
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
uint32_t call_id{0};
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
bool wants_response{false};
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
std::string response_template{};
|
||||
#endif
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
@@ -1106,6 +1132,30 @@ class HomeassistantServiceResponse final : public ProtoMessage {
|
||||
protected:
|
||||
};
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
class HomeassistantActionResponse final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 130;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 34;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "homeassistant_action_response"; }
|
||||
#endif
|
||||
uint32_t call_id{0};
|
||||
bool success{false};
|
||||
std::string error_message{};
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
const uint8_t *response_data{nullptr};
|
||||
uint16_t response_data_len{0};
|
||||
#endif
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
|
||||
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
|
||||
};
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
class SubscribeHomeAssistantStatesRequest final : public ProtoMessage {
|
||||
public:
|
||||
@@ -1174,12 +1224,13 @@ class GetTimeRequest final : public ProtoMessage {
|
||||
class GetTimeResponse final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 37;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 14;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 24;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "get_time_response"; }
|
||||
#endif
|
||||
uint32_t epoch_seconds{0};
|
||||
std::string timezone{};
|
||||
const uint8_t *timezone{nullptr};
|
||||
uint16_t timezone_len{0};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -1212,7 +1263,7 @@ class ListEntitiesServicesResponse final : public ProtoMessage {
|
||||
StringRef name_ref_{};
|
||||
void set_name(const StringRef &ref) { this->name_ref_ = ref; }
|
||||
uint32_t key{0};
|
||||
std::vector<ListEntitiesServicesArgument> args{};
|
||||
FixedVector<ListEntitiesServicesArgument> args{};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
@@ -1228,10 +1279,11 @@ class ExecuteServiceArgument final : public ProtoDecodableMessage {
|
||||
float float_{0.0f};
|
||||
std::string string_{};
|
||||
int32_t int_{0};
|
||||
std::vector<bool> bool_array{};
|
||||
std::vector<int32_t> int_array{};
|
||||
std::vector<float> float_array{};
|
||||
std::vector<std::string> string_array{};
|
||||
FixedVector<bool> bool_array{};
|
||||
FixedVector<int32_t> int_array{};
|
||||
FixedVector<float> float_array{};
|
||||
FixedVector<std::string> string_array{};
|
||||
void decode(const uint8_t *buffer, size_t length) override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -1249,7 +1301,8 @@ class ExecuteServiceRequest final : public ProtoDecodableMessage {
|
||||
const char *message_name() const override { return "execute_service_request"; }
|
||||
#endif
|
||||
uint32_t key{0};
|
||||
std::vector<ExecuteServiceArgument> args{};
|
||||
FixedVector<ExecuteServiceArgument> args{};
|
||||
void decode(const uint8_t *buffer, size_t length) override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -1318,7 +1371,7 @@ class CameraImageRequest final : public ProtoDecodableMessage {
|
||||
class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 46;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 145;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 150;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "list_entities_climate_response"; }
|
||||
#endif
|
||||
@@ -1339,6 +1392,7 @@ class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
|
||||
bool supports_target_humidity{false};
|
||||
float visual_min_humidity{0.0f};
|
||||
float visual_max_humidity{0.0f};
|
||||
uint32_t feature_flags{0};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
@@ -1872,7 +1926,7 @@ class BluetoothGATTCharacteristic final : public ProtoMessage {
|
||||
std::array<uint64_t, 2> uuid{};
|
||||
uint32_t handle{0};
|
||||
uint32_t properties{0};
|
||||
std::vector<BluetoothGATTDescriptor> descriptors{};
|
||||
FixedVector<BluetoothGATTDescriptor> descriptors{};
|
||||
uint32_t short_uuid{0};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
@@ -1886,7 +1940,7 @@ class BluetoothGATTService final : public ProtoMessage {
|
||||
public:
|
||||
std::array<uint64_t, 2> uuid{};
|
||||
uint32_t handle{0};
|
||||
std::vector<BluetoothGATTCharacteristic> characteristics{};
|
||||
FixedVector<BluetoothGATTCharacteristic> characteristics{};
|
||||
uint32_t short_uuid{0};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
@@ -1971,14 +2025,15 @@ class BluetoothGATTReadResponse final : public ProtoMessage {
|
||||
class BluetoothGATTWriteRequest final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 75;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 19;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 29;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "bluetooth_gatt_write_request"; }
|
||||
#endif
|
||||
uint64_t address{0};
|
||||
uint32_t handle{0};
|
||||
bool response{false};
|
||||
std::string data{};
|
||||
const uint8_t *data{nullptr};
|
||||
uint16_t data_len{0};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -2006,13 +2061,14 @@ class BluetoothGATTReadDescriptorRequest final : public ProtoDecodableMessage {
|
||||
class BluetoothGATTWriteDescriptorRequest final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 77;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 17;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 27;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "bluetooth_gatt_write_descriptor_request"; }
|
||||
#endif
|
||||
uint64_t address{0};
|
||||
uint32_t handle{0};
|
||||
std::string data{};
|
||||
const uint8_t *data{nullptr};
|
||||
uint16_t data_len{0};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -2437,18 +2493,37 @@ class VoiceAssistantWakeWord final : public ProtoMessage {
|
||||
|
||||
protected:
|
||||
};
|
||||
class VoiceAssistantConfigurationRequest final : public ProtoMessage {
|
||||
class VoiceAssistantExternalWakeWord final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 121;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 0;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "voice_assistant_configuration_request"; }
|
||||
#endif
|
||||
std::string id{};
|
||||
std::string wake_word{};
|
||||
std::vector<std::string> trained_languages{};
|
||||
std::string model_type{};
|
||||
uint32_t model_size{0};
|
||||
std::string model_hash{};
|
||||
std::string url{};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
|
||||
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
|
||||
};
|
||||
class VoiceAssistantConfigurationRequest final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 121;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 34;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "voice_assistant_configuration_request"; }
|
||||
#endif
|
||||
std::vector<VoiceAssistantExternalWakeWord> external_wake_words{};
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
|
||||
};
|
||||
class VoiceAssistantConfigurationResponse final : public ProtoMessage {
|
||||
public:
|
||||
@@ -2911,5 +2986,45 @@ class UpdateCommandRequest final : public CommandProtoMessage {
|
||||
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
|
||||
};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
class ZWaveProxyFrame final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 128;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 19;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "z_wave_proxy_frame"; }
|
||||
#endif
|
||||
const uint8_t *data{nullptr};
|
||||
uint16_t data_len{0};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
|
||||
};
|
||||
class ZWaveProxyRequest final : public ProtoDecodableMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 129;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 21;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "z_wave_proxy_request"; }
|
||||
#endif
|
||||
enums::ZWaveProxyRequestType type{};
|
||||
const uint8_t *data{nullptr};
|
||||
uint16_t data_len{0};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;
|
||||
bool decode_varint(uint32_t field_id, ProtoVarInt value) override;
|
||||
};
|
||||
#endif
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -655,10 +655,26 @@ template<> const char *proto_enum_to_string<enums::UpdateCommand>(enums::UpdateC
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
template<> const char *proto_enum_to_string<enums::ZWaveProxyRequestType>(enums::ZWaveProxyRequestType value) {
  switch (value) {
    case enums::ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE:
      return "ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE";
    case enums::ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE:
      return "ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE";
    case enums::ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE:
      return "ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE";
    default:
      return "UNKNOWN";
  }
}
#endif
|
||||
|
||||
void HelloRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HelloRequest");
|
||||
dump_field(out, "client_info", this->client_info);
|
||||
out.append(" client_info: ");
|
||||
out.append(format_hex_pretty(this->client_info, this->client_info_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "api_version_major", this->api_version_major);
|
||||
dump_field(out, "api_version_minor", this->api_version_minor);
|
||||
}
|
||||
@@ -669,8 +685,18 @@ void HelloResponse::dump_to(std::string &out) const {
|
||||
dump_field(out, "server_info", this->server_info_ref_);
|
||||
dump_field(out, "name", this->name_ref_);
|
||||
}
|
||||
void ConnectRequest::dump_to(std::string &out) const { dump_field(out, "password", this->password); }
|
||||
void ConnectResponse::dump_to(std::string &out) const { dump_field(out, "invalid_password", this->invalid_password); }
|
||||
#ifdef USE_API_PASSWORD
|
||||
void AuthenticationRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "AuthenticationRequest");
|
||||
out.append(" password: ");
|
||||
out.append(format_hex_pretty(this->password, this->password_len));
|
||||
out.append("\n");
|
||||
}
|
||||
void AuthenticationResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "AuthenticationResponse");
|
||||
dump_field(out, "invalid_password", this->invalid_password);
|
||||
}
|
||||
#endif
|
||||
void DisconnectRequest::dump_to(std::string &out) const { out.append("DisconnectRequest {}"); }
|
||||
void DisconnectResponse::dump_to(std::string &out) const { out.append("DisconnectResponse {}"); }
|
||||
void PingRequest::dump_to(std::string &out) const { out.append("PingRequest {}"); }
|
||||
@@ -749,6 +775,12 @@ void DeviceInfoResponse::dump_to(std::string &out) const {
|
||||
this->area.dump_to(out);
|
||||
out.append("\n");
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
dump_field(out, "zwave_proxy_feature_flags", this->zwave_proxy_feature_flags);
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
dump_field(out, "zwave_home_id", this->zwave_home_id);
|
||||
#endif
|
||||
}
|
||||
void ListEntitiesRequest::dump_to(std::string &out) const { out.append("ListEntitiesRequest {}"); }
|
||||
void ListEntitiesDoneResponse::dump_to(std::string &out) const { out.append("ListEntitiesDoneResponse {}"); }
|
||||
@@ -1071,8 +1103,8 @@ void HomeassistantServiceMap::dump_to(std::string &out) const {
|
||||
dump_field(out, "key", this->key_ref_);
|
||||
dump_field(out, "value", this->value);
|
||||
}
|
||||
void HomeassistantServiceResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeassistantServiceResponse");
|
||||
void HomeassistantActionRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeassistantActionRequest");
|
||||
dump_field(out, "service", this->service_ref_);
|
||||
for (const auto &it : this->data) {
|
||||
out.append(" data: ");
|
||||
@@ -1090,6 +1122,28 @@ void HomeassistantServiceResponse::dump_to(std::string &out) const {
|
||||
out.append("\n");
|
||||
}
|
||||
dump_field(out, "is_event", this->is_event);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
dump_field(out, "call_id", this->call_id);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
dump_field(out, "wants_response", this->wants_response);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
dump_field(out, "response_template", this->response_template);
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
void HomeassistantActionResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeassistantActionResponse");
|
||||
dump_field(out, "call_id", this->call_id);
|
||||
dump_field(out, "success", this->success);
|
||||
dump_field(out, "error_message", this->error_message);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
out.append(" response_data: ");
|
||||
out.append(format_hex_pretty(this->response_data, this->response_data_len));
|
||||
out.append("\n");
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
@@ -1113,7 +1167,9 @@ void GetTimeRequest::dump_to(std::string &out) const { out.append("GetTimeReques
|
||||
void GetTimeResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "GetTimeResponse");
|
||||
dump_field(out, "epoch_seconds", this->epoch_seconds);
|
||||
dump_field(out, "timezone", this->timezone);
|
||||
out.append(" timezone: ");
|
||||
out.append(format_hex_pretty(this->timezone, this->timezone_len));
|
||||
out.append("\n");
|
||||
}
|
||||
#ifdef USE_API_SERVICES
|
||||
void ListEntitiesServicesArgument::dump_to(std::string &out) const {
|
||||
@@ -1236,6 +1292,7 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "feature_flags", this->feature_flags);
|
||||
}
|
||||
void ClimateStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ClimateStateResponse");
|
||||
@@ -1626,7 +1683,7 @@ void BluetoothGATTWriteRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "handle", this->handle);
|
||||
dump_field(out, "response", this->response);
|
||||
out.append(" data: ");
|
||||
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()));
|
||||
out.append(format_hex_pretty(this->data, this->data_len));
|
||||
out.append("\n");
|
||||
}
|
||||
void BluetoothGATTReadDescriptorRequest::dump_to(std::string &out) const {
|
||||
@@ -1639,7 +1696,7 @@ void BluetoothGATTWriteDescriptorRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "address", this->address);
|
||||
dump_field(out, "handle", this->handle);
|
||||
out.append(" data: ");
|
||||
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()));
|
||||
out.append(format_hex_pretty(this->data, this->data_len));
|
||||
out.append("\n");
|
||||
}
|
||||
void BluetoothGATTNotifyRequest::dump_to(std::string &out) const {
|
||||
@@ -1792,8 +1849,25 @@ void VoiceAssistantWakeWord::dump_to(std::string &out) const {
|
||||
dump_field(out, "trained_languages", it, 4);
|
||||
}
|
||||
}
|
||||
void VoiceAssistantExternalWakeWord::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "VoiceAssistantExternalWakeWord");
|
||||
dump_field(out, "id", this->id);
|
||||
dump_field(out, "wake_word", this->wake_word);
|
||||
for (const auto &it : this->trained_languages) {
|
||||
dump_field(out, "trained_languages", it, 4);
|
||||
}
|
||||
dump_field(out, "model_type", this->model_type);
|
||||
dump_field(out, "model_size", this->model_size);
|
||||
dump_field(out, "model_hash", this->model_hash);
|
||||
dump_field(out, "url", this->url);
|
||||
}
|
||||
void VoiceAssistantConfigurationRequest::dump_to(std::string &out) const {
|
||||
out.append("VoiceAssistantConfigurationRequest {}");
|
||||
MessageDumpHelper helper(out, "VoiceAssistantConfigurationRequest");
|
||||
for (const auto &it : this->external_wake_words) {
|
||||
out.append(" external_wake_words: ");
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
}
|
||||
void VoiceAssistantConfigurationResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "VoiceAssistantConfigurationResponse");
|
||||
@@ -2102,6 +2176,21 @@ void UpdateCommandRequest::dump_to(std::string &out) const {
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
void ZWaveProxyFrame::dump_to(std::string &out) const {
  MessageDumpHelper helper(out, "ZWaveProxyFrame");
  out.append(" data: ");
  out.append(format_hex_pretty(this->data, this->data_len));
  out.append("\n");
}
void ZWaveProxyRequest::dump_to(std::string &out) const {
  MessageDumpHelper helper(out, "ZWaveProxyRequest");
  dump_field(out, "type", static_cast<enums::ZWaveProxyRequestType>(this->type));
  out.append(" data: ");
  out.append(format_hex_pretty(this->data, this->data_len));
  out.append("\n");
}
#endif
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
|
||||
@@ -24,15 +24,17 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
this->on_hello_request(msg);
|
||||
break;
|
||||
}
|
||||
case ConnectRequest::MESSAGE_TYPE: {
|
||||
ConnectRequest msg;
|
||||
#ifdef USE_API_PASSWORD
|
||||
case AuthenticationRequest::MESSAGE_TYPE: {
|
||||
AuthenticationRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_connect_request: %s", msg.dump().c_str());
|
||||
ESP_LOGVV(TAG, "on_authentication_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_connect_request(msg);
|
||||
this->on_authentication_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
case DisconnectRequest::MESSAGE_TYPE: {
|
||||
DisconnectRequest msg;
|
||||
// Empty message: no decode needed
|
||||
@@ -546,7 +548,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
case VoiceAssistantConfigurationRequest::MESSAGE_TYPE: {
|
||||
VoiceAssistantConfigurationRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_voice_assistant_configuration_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -586,6 +588,39 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
this->on_bluetooth_scanner_set_mode_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
case ZWaveProxyFrame::MESSAGE_TYPE: {
|
||||
ZWaveProxyFrame msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_z_wave_proxy_frame: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_z_wave_proxy_frame(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
case ZWaveProxyRequest::MESSAGE_TYPE: {
|
||||
ZWaveProxyRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_z_wave_proxy_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_z_wave_proxy_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
case HomeassistantActionResponse::MESSAGE_TYPE: {
|
||||
HomeassistantActionResponse msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_homeassistant_action_response: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_homeassistant_action_response(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
default:
|
||||
break;
|
||||
@@ -597,11 +632,13 @@ void APIServerConnection::on_hello_request(const HelloRequest &msg) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_connect_request(const ConnectRequest &msg) {
|
||||
if (!this->send_connect_response(msg)) {
|
||||
#ifdef USE_API_PASSWORD
|
||||
void APIServerConnection::on_authentication_request(const AuthenticationRequest &msg) {
|
||||
if (!this->send_authenticate_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
void APIServerConnection::on_disconnect_request(const DisconnectRequest &msg) {
|
||||
if (!this->send_disconnect_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
@@ -613,241 +650,139 @@ void APIServerConnection::on_ping_request(const PingRequest &msg) {
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_device_info_request(const DeviceInfoRequest &msg) {
|
||||
if (this->check_connection_setup_() && !this->send_device_info_response(msg)) {
|
||||
if (!this->send_device_info_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->list_entities(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) { this->list_entities(msg); }
|
||||
void APIServerConnection::on_subscribe_states_request(const SubscribeStatesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_states(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_logs(msg);
|
||||
}
|
||||
this->subscribe_states(msg);
|
||||
}
|
||||
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) { this->subscribe_logs(msg); }
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void APIServerConnection::on_subscribe_homeassistant_services_request(
|
||||
const SubscribeHomeassistantServicesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_homeassistant_services(msg);
|
||||
}
|
||||
this->subscribe_homeassistant_services(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
void APIServerConnection::on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_home_assistant_states(msg);
|
||||
}
|
||||
this->subscribe_home_assistant_states(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_SERVICES
|
||||
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->execute_service(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) { this->execute_service(msg); }
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
void APIServerConnection::on_noise_encryption_set_key_request(const NoiseEncryptionSetKeyRequest &msg) {
|
||||
if (this->check_authenticated_() && !this->send_noise_encryption_set_key_response(msg)) {
|
||||
if (!this->send_noise_encryption_set_key_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BUTTON
|
||||
void APIServerConnection::on_button_command_request(const ButtonCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->button_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_button_command_request(const ButtonCommandRequest &msg) { this->button_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
void APIServerConnection::on_camera_image_request(const CameraImageRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->camera_image(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_camera_image_request(const CameraImageRequest &msg) { this->camera_image(msg); }
|
||||
#endif
|
||||
#ifdef USE_CLIMATE
|
||||
void APIServerConnection::on_climate_command_request(const ClimateCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->climate_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_climate_command_request(const ClimateCommandRequest &msg) { this->climate_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_COVER
|
||||
void APIServerConnection::on_cover_command_request(const CoverCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->cover_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_cover_command_request(const CoverCommandRequest &msg) { this->cover_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_DATETIME_DATE
|
||||
void APIServerConnection::on_date_command_request(const DateCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->date_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_date_command_request(const DateCommandRequest &msg) { this->date_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_DATETIME_DATETIME
|
||||
void APIServerConnection::on_date_time_command_request(const DateTimeCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->datetime_command(msg);
|
||||
}
|
||||
this->datetime_command(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_FAN
|
||||
void APIServerConnection::on_fan_command_request(const FanCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->fan_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_fan_command_request(const FanCommandRequest &msg) { this->fan_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_LIGHT
|
||||
void APIServerConnection::on_light_command_request(const LightCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->light_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_light_command_request(const LightCommandRequest &msg) { this->light_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_LOCK
|
||||
void APIServerConnection::on_lock_command_request(const LockCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->lock_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_lock_command_request(const LockCommandRequest &msg) { this->lock_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_MEDIA_PLAYER
|
||||
void APIServerConnection::on_media_player_command_request(const MediaPlayerCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->media_player_command(msg);
|
||||
}
|
||||
this->media_player_command(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_NUMBER
|
||||
void APIServerConnection::on_number_command_request(const NumberCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->number_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_number_command_request(const NumberCommandRequest &msg) { this->number_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_SELECT
|
||||
void APIServerConnection::on_select_command_request(const SelectCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->select_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_select_command_request(const SelectCommandRequest &msg) { this->select_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_SIREN
|
||||
void APIServerConnection::on_siren_command_request(const SirenCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->siren_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_siren_command_request(const SirenCommandRequest &msg) { this->siren_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_SWITCH
|
||||
void APIServerConnection::on_switch_command_request(const SwitchCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->switch_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_switch_command_request(const SwitchCommandRequest &msg) { this->switch_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_TEXT
|
||||
void APIServerConnection::on_text_command_request(const TextCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->text_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_text_command_request(const TextCommandRequest &msg) { this->text_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_DATETIME_TIME
|
||||
void APIServerConnection::on_time_command_request(const TimeCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->time_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_time_command_request(const TimeCommandRequest &msg) { this->time_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_UPDATE
|
||||
void APIServerConnection::on_update_command_request(const UpdateCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->update_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_update_command_request(const UpdateCommandRequest &msg) { this->update_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_VALVE
|
||||
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->valve_command(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) { this->valve_command(msg); }
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_subscribe_bluetooth_le_advertisements_request(
|
||||
const SubscribeBluetoothLEAdvertisementsRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_bluetooth_le_advertisements(msg);
|
||||
}
|
||||
this->subscribe_bluetooth_le_advertisements(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_device_request(const BluetoothDeviceRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_device_request(msg);
|
||||
}
|
||||
this->bluetooth_device_request(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_get_services_request(const BluetoothGATTGetServicesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_get_services(msg);
|
||||
}
|
||||
this->bluetooth_gatt_get_services(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_read_request(const BluetoothGATTReadRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_read(msg);
|
||||
}
|
||||
this->bluetooth_gatt_read(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_write_request(const BluetoothGATTWriteRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_write(msg);
|
||||
}
|
||||
this->bluetooth_gatt_write(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_read_descriptor_request(const BluetoothGATTReadDescriptorRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_read_descriptor(msg);
|
||||
}
|
||||
this->bluetooth_gatt_read_descriptor(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_write_descriptor_request(const BluetoothGATTWriteDescriptorRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_write_descriptor(msg);
|
||||
}
|
||||
this->bluetooth_gatt_write_descriptor(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_notify_request(const BluetoothGATTNotifyRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_notify(msg);
|
||||
}
|
||||
this->bluetooth_gatt_notify(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
|
||||
const SubscribeBluetoothConnectionsFreeRequest &msg) {
|
||||
if (this->check_authenticated_() && !this->send_subscribe_bluetooth_connections_free_response(msg)) {
|
||||
if (!this->send_subscribe_bluetooth_connections_free_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
@@ -855,45 +790,68 @@ void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_unsubscribe_bluetooth_le_advertisements_request(
|
||||
const UnsubscribeBluetoothLEAdvertisementsRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->unsubscribe_bluetooth_le_advertisements(msg);
|
||||
}
|
||||
this->unsubscribe_bluetooth_le_advertisements(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_scanner_set_mode(msg);
|
||||
}
|
||||
this->bluetooth_scanner_set_mode(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
void APIServerConnection::on_subscribe_voice_assistant_request(const SubscribeVoiceAssistantRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_voice_assistant(msg);
|
||||
}
|
||||
this->subscribe_voice_assistant(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
void APIServerConnection::on_voice_assistant_configuration_request(const VoiceAssistantConfigurationRequest &msg) {
|
||||
if (this->check_authenticated_() && !this->send_voice_assistant_get_configuration_response(msg)) {
|
||||
if (!this->send_voice_assistant_get_configuration_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
void APIServerConnection::on_voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->voice_assistant_set_configuration(msg);
|
||||
}
|
||||
this->voice_assistant_set_configuration(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
void APIServerConnection::on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->alarm_control_panel_command(msg);
|
||||
}
|
||||
this->alarm_control_panel_command(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void APIServerConnection::on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) { this->zwave_proxy_frame(msg); }
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void APIServerConnection::on_z_wave_proxy_request(const ZWaveProxyRequest &msg) { this->zwave_proxy_request(msg); }
|
||||
#endif
|
||||
|
||||
void APIServerConnection::read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) {
  // Check authentication/connection requirements for messages
  switch (msg_type) {
    case HelloRequest::MESSAGE_TYPE:  // No setup required
#ifdef USE_API_PASSWORD
    case AuthenticationRequest::MESSAGE_TYPE:  // No setup required
#endif
    case DisconnectRequest::MESSAGE_TYPE:  // No setup required
    case PingRequest::MESSAGE_TYPE:        // No setup required
      break;                               // Skip all checks for these messages
    case DeviceInfoRequest::MESSAGE_TYPE:  // Connection setup only
      if (!this->check_connection_setup_()) {
        return;  // Connection not setup
      }
      break;
    default:
      // All other messages require authentication (which includes connection check)
      if (!this->check_authenticated_()) {
        return;  // Authentication failed
      }
      break;
  }

  // Call base implementation to process the message
  APIServerConnectionBase::read_message(msg_size, msg_type, msg_data);
}
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -26,7 +26,9 @@ class APIServerConnectionBase : public ProtoService {
|
||||
|
||||
virtual void on_hello_request(const HelloRequest &value){};
|
||||
|
||||
virtual void on_connect_request(const ConnectRequest &value){};
|
||||
#ifdef USE_API_PASSWORD
|
||||
virtual void on_authentication_request(const AuthenticationRequest &value){};
|
||||
#endif
|
||||
|
||||
virtual void on_disconnect_request(const DisconnectRequest &value){};
|
||||
virtual void on_disconnect_response(const DisconnectResponse &value){};
|
||||
@@ -64,6 +66,9 @@ class APIServerConnectionBase : public ProtoService {
|
||||
virtual void on_subscribe_homeassistant_services_request(const SubscribeHomeassistantServicesRequest &value){};
|
||||
#endif
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
virtual void on_homeassistant_action_response(const HomeassistantActionResponse &value){};
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
virtual void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &value){};
|
||||
#endif
|
||||
@@ -205,6 +210,12 @@ class APIServerConnectionBase : public ProtoService {
|
||||
|
||||
#ifdef USE_UPDATE
|
||||
virtual void on_update_command_request(const UpdateCommandRequest &value){};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void on_z_wave_proxy_frame(const ZWaveProxyFrame &value){};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void on_z_wave_proxy_request(const ZWaveProxyRequest &value){};
|
||||
#endif
|
||||
protected:
|
||||
void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;
|
||||
@@ -213,7 +224,9 @@ class APIServerConnectionBase : public ProtoService {
|
||||
class APIServerConnection : public APIServerConnectionBase {
|
||||
public:
|
||||
virtual bool send_hello_response(const HelloRequest &msg) = 0;
|
||||
virtual bool send_connect_response(const ConnectRequest &msg) = 0;
|
||||
#ifdef USE_API_PASSWORD
|
||||
virtual bool send_authenticate_response(const AuthenticationRequest &msg) = 0;
|
||||
#endif
|
||||
virtual bool send_disconnect_response(const DisconnectRequest &msg) = 0;
|
||||
virtual bool send_ping_response(const PingRequest &msg) = 0;
|
||||
virtual bool send_device_info_response(const DeviceInfoRequest &msg) = 0;
|
||||
@@ -331,10 +344,18 @@ class APIServerConnection : public APIServerConnectionBase {
|
||||
#endif
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
virtual void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) = 0;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void zwave_proxy_frame(const ZWaveProxyFrame &msg) = 0;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void zwave_proxy_request(const ZWaveProxyRequest &msg) = 0;
|
||||
#endif
|
||||
protected:
|
||||
void on_hello_request(const HelloRequest &msg) override;
|
||||
void on_connect_request(const ConnectRequest &msg) override;
|
||||
#ifdef USE_API_PASSWORD
|
||||
void on_authentication_request(const AuthenticationRequest &msg) override;
|
||||
#endif
|
||||
void on_disconnect_request(const DisconnectRequest &msg) override;
|
||||
void on_ping_request(const PingRequest &msg) override;
|
||||
void on_device_info_request(const DeviceInfoRequest &msg) override;
|
||||
@@ -453,6 +474,13 @@ class APIServerConnection : public APIServerConnectionBase {
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
void on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) override;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) override;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void on_z_wave_proxy_request(const ZWaveProxyRequest &msg) override;
|
||||
#endif
|
||||
void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;
|
||||
};
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -9,12 +9,16 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/util.h"
|
||||
#include "esphome/core/version.h"
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
#include "homeassistant_service.h"
|
||||
#endif
|
||||
|
||||
#ifdef USE_LOGGER
|
||||
#include "esphome/components/logger/logger.h"
|
||||
#endif
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
@@ -37,12 +41,14 @@ void APIServer::setup() {
|
||||
|
||||
this->noise_pref_ = global_preferences->make_preference<SavedNoisePsk>(hash, true);
|
||||
|
||||
#ifndef USE_API_NOISE_PSK_FROM_YAML
|
||||
// Only load saved PSK if not set from YAML
|
||||
SavedNoisePsk noise_pref_saved{};
|
||||
if (this->noise_pref_.load(&noise_pref_saved)) {
|
||||
ESP_LOGD(TAG, "Loaded saved Noise PSK");
|
||||
|
||||
this->set_noise_psk(noise_pref_saved.psk);
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
|
||||
// Schedule reboot if no clients connect within timeout
|
||||
@@ -85,7 +91,7 @@ void APIServer::setup() {
|
||||
return;
|
||||
}
|
||||
|
||||
err = this->socket_->listen(4);
|
||||
err = this->socket_->listen(this->listen_backlog_);
|
||||
if (err != 0) {
|
||||
ESP_LOGW(TAG, "Socket unable to listen: errno %d", errno);
|
||||
this->mark_failed();
|
||||
@@ -138,9 +144,19 @@ void APIServer::loop() {
|
||||
while (true) {
|
||||
struct sockaddr_storage source_addr;
|
||||
socklen_t addr_len = sizeof(source_addr);
|
||||
|
||||
auto sock = this->socket_->accept_loop_monitored((struct sockaddr *) &source_addr, &addr_len);
|
||||
if (!sock)
|
||||
break;
|
||||
|
||||
// Check if we're at the connection limit
|
||||
if (this->clients_.size() >= this->max_connections_) {
|
||||
ESP_LOGW(TAG, "Max connections (%d), rejecting %s", this->max_connections_, sock->getpeername().c_str());
|
||||
// Immediately close - socket destructor will handle cleanup
|
||||
sock.reset();
|
||||
continue;
|
||||
}
|
||||
|
||||
ESP_LOGD(TAG, "Accept %s", sock->getpeername().c_str());
|
||||
|
||||
auto *conn = new APIConnection(std::move(sock), this);
|
||||
@@ -165,7 +181,8 @@ void APIServer::loop() {
|
||||
// Network is down - disconnect all clients
|
||||
for (auto &client : this->clients_) {
|
||||
client->on_fatal_error();
|
||||
ESP_LOGW(TAG, "%s: Network down; disconnect", client->get_client_combined_info().c_str());
|
||||
ESP_LOGW(TAG, "%s (%s): Network down; disconnect", client->client_info_.name.c_str(),
|
||||
client->client_info_.peername.c_str());
|
||||
}
|
||||
// Continue to process and clean up the clients below
|
||||
}
|
||||
@@ -204,8 +221,10 @@ void APIServer::loop() {
|
||||
void APIServer::dump_config() {
|
||||
ESP_LOGCONFIG(TAG,
|
||||
"Server:\n"
|
||||
" Address: %s:%u",
|
||||
network::get_use_address().c_str(), this->port_);
|
||||
" Address: %s:%u\n"
|
||||
" Listen backlog: %u\n"
|
||||
" Max connections: %u",
|
||||
network::get_use_address().c_str(), this->port_, this->listen_backlog_, this->max_connections_);
|
||||
#ifdef USE_API_NOISE
|
||||
ESP_LOGCONFIG(TAG, " Noise encryption: %s", YESNO(this->noise_ctx_->has_psk()));
|
||||
if (!this->noise_ctx_->has_psk()) {
|
||||
@@ -217,12 +236,12 @@ void APIServer::dump_config() {
|
||||
}
|
||||
|
||||
#ifdef USE_API_PASSWORD
bool APIServer::check_password(const std::string &password) const {
bool APIServer::check_password(const uint8_t *password_data, size_t password_len) const {
  // depend only on input password length
  const char *a = this->password_.c_str();
  uint32_t len_a = this->password_.length();
  const char *b = password.c_str();
  uint32_t len_b = password.length();
  const char *b = reinterpret_cast<const char *>(password_data);
  uint32_t len_b = password_len;

  // disable optimization with volatile
  volatile uint32_t length = len_b;
@@ -245,6 +264,7 @@ bool APIServer::check_password(const std::string &password) const {

  return result == 0;
}

#endif
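The constant-time check above is only partially visible; the comparison loop itself falls outside the diff context. As a hedged, self-contained sketch of the technique the comments describe (timing depends only on the input length, no early return), something along these lines would work; the function name and the modulo indexing are illustrative assumptions, not the elided ESPHome code:

#include <cstdint>

bool constant_time_equals(const char *expected, uint32_t expected_len, const char *input, uint32_t input_len) {
  // volatile discourages the optimizer from short-circuiting the loop
  volatile uint32_t length = input_len;
  volatile uint8_t result = (expected_len == input_len) ? 0 : 1;
  for (uint32_t i = 0; i < length; i++) {
    // Index the expected secret modulo its length so every input byte costs one iteration,
    // keeping the timing dependent only on the caller-supplied input length.
    uint8_t a = expected_len != 0 ? static_cast<uint8_t>(expected[i % expected_len]) : 0;
    result |= a ^ static_cast<uint8_t>(input[i]);
  }
  return result == 0;
}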
|
||||
|
||||
void APIServer::handle_disconnect(APIConnection *conn) {}
|
||||
@@ -355,6 +375,15 @@ void APIServer::on_update(update::UpdateEntity *obj) {
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_ZWAVE_PROXY
void APIServer::on_zwave_proxy_request(const esphome::api::ProtoMessage &msg) {
  // We could add code to manage a second subscription type, but, since this message type is
  // very infrequent and small, we simply send it to all clients
  for (auto &c : this->clients_)
    c->send_message(msg, api::ZWaveProxyRequest::MESSAGE_TYPE);
}
#endif
|
||||
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
API_DISPATCH_UPDATE(alarm_control_panel::AlarmControlPanel, alarm_control_panel)
|
||||
#endif
|
||||
@@ -370,12 +399,43 @@ void APIServer::set_password(const std::string &password) { this->password_ = pa
|
||||
void APIServer::set_batch_delay(uint16_t batch_delay) { this->batch_delay_ = batch_delay; }
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
void APIServer::send_homeassistant_service_call(const HomeassistantServiceResponse &call) {
void APIServer::send_homeassistant_action(const HomeassistantActionRequest &call) {
  for (auto &client : this->clients_) {
    client->send_homeassistant_service_call(call);
    client->send_homeassistant_action(call);
  }
}
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
void APIServer::register_action_response_callback(uint32_t call_id, ActionResponseCallback callback) {
  this->action_response_callbacks_.push_back({call_id, std::move(callback)});
}

void APIServer::handle_action_response(uint32_t call_id, bool success, const std::string &error_message) {
  for (auto it = this->action_response_callbacks_.begin(); it != this->action_response_callbacks_.end(); ++it) {
    if (it->call_id == call_id) {
      auto callback = std::move(it->callback);
      this->action_response_callbacks_.erase(it);
      ActionResponse response(success, error_message);
      callback(response);
      return;
    }
  }
}
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
void APIServer::handle_action_response(uint32_t call_id, bool success, const std::string &error_message,
                                       const uint8_t *response_data, size_t response_data_len) {
  for (auto it = this->action_response_callbacks_.begin(); it != this->action_response_callbacks_.end(); ++it) {
    if (it->call_id == call_id) {
      auto callback = std::move(it->callback);
      this->action_response_callbacks_.erase(it);
      ActionResponse response(success, error_message, response_data, response_data_len);
      callback(response);
      return;
    }
  }
}
#endif  // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
#endif  // USE_API_HOMEASSISTANT_ACTION_RESPONSES
#endif  // USE_API_HOMEASSISTANT_SERVICES
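A hedged usage sketch of the callback registry added above: a caller registers a callback for a call id it generated, and the connection layer later resolves it through handle_action_response. Only register_action_response_callback, handle_action_response and the ActionResponse accessors come from this diff; the server pointer, log tag and call id below are invented for illustration:

void example_action_status(APIServer *server) {
  uint32_t call_id = 42;  // in HomeAssistantServiceCallAction this comes from a static counter
  server->register_action_response_callback(call_id, [](const ActionResponse &response) {
    if (response.is_success()) {
      ESP_LOGD("example", "Home Assistant action succeeded");
    } else {
      ESP_LOGW("example", "Home Assistant action failed: %s", response.get_error_message().c_str());
    }
  });
  // Later, when the HomeassistantActionResponse message for this call id arrives:
  server->handle_action_response(call_id, /*success=*/false, "service not found");
}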
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
void APIServer::subscribe_home_assistant_state(std::string entity_id, optional<std::string> attribute,
|
||||
@@ -409,6 +469,12 @@ void APIServer::set_reboot_timeout(uint32_t reboot_timeout) { this->reboot_timeo
|
||||
|
||||
#ifdef USE_API_NOISE
|
||||
bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
|
||||
#ifdef USE_API_NOISE_PSK_FROM_YAML
|
||||
// When PSK is set from YAML, this function should never be called
|
||||
// but if it is, reject the change
|
||||
ESP_LOGW(TAG, "Key set in YAML");
|
||||
return false;
|
||||
#else
|
||||
auto &old_psk = this->noise_ctx_->get_psk();
|
||||
if (std::equal(old_psk.begin(), old_psk.end(), psk.begin())) {
|
||||
ESP_LOGW(TAG, "New PSK matches old");
|
||||
@@ -437,6 +503,7 @@ bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
|
||||
});
|
||||
}
|
||||
return true;
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
#include "user_services.h"
|
||||
#endif
|
||||
|
||||
#include <map>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome::api {
|
||||
@@ -37,13 +38,15 @@ class APIServer : public Component, public Controller {
|
||||
void on_shutdown() override;
|
||||
bool teardown() override;
|
||||
#ifdef USE_API_PASSWORD
|
||||
bool check_password(const std::string &password) const;
|
||||
bool check_password(const uint8_t *password_data, size_t password_len) const;
|
||||
void set_password(const std::string &password);
|
||||
#endif
|
||||
void set_port(uint16_t port);
|
||||
void set_reboot_timeout(uint32_t reboot_timeout);
|
||||
void set_batch_delay(uint16_t batch_delay);
|
||||
uint16_t get_batch_delay() const { return batch_delay_; }
|
||||
void set_listen_backlog(uint8_t listen_backlog) { this->listen_backlog_ = listen_backlog; }
|
||||
void set_max_connections(uint8_t max_connections) { this->max_connections_ = max_connections; }
|
||||
|
||||
// Get reference to shared buffer for API connections
|
||||
std::vector<uint8_t> &get_shared_buffer_ref() { return shared_write_buffer_; }
|
||||
@@ -107,8 +110,19 @@ class APIServer : public Component, public Controller {
|
||||
void on_media_player_update(media_player::MediaPlayer *obj) override;
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void send_homeassistant_service_call(const HomeassistantServiceResponse &call);
|
||||
#endif
|
||||
void send_homeassistant_action(const HomeassistantActionRequest &call);
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
// Action response handling
|
||||
using ActionResponseCallback = std::function<void(const class ActionResponse &)>;
|
||||
void register_action_response_callback(uint32_t call_id, ActionResponseCallback callback);
|
||||
void handle_action_response(uint32_t call_id, bool success, const std::string &error_message);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
void handle_action_response(uint32_t call_id, bool success, const std::string &error_message,
|
||||
const uint8_t *response_data, size_t response_data_len);
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
#endif // USE_API_HOMEASSISTANT_SERVICES
|
||||
#ifdef USE_API_SERVICES
|
||||
void register_user_service(UserServiceDescriptor *descriptor) { this->user_services_.push_back(descriptor); }
|
||||
#endif
|
||||
@@ -125,6 +139,9 @@ class APIServer : public Component, public Controller {
|
||||
#ifdef USE_UPDATE
|
||||
void on_update(update::UpdateEntity *obj) override;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void on_zwave_proxy_request(const esphome::api::ProtoMessage &msg);
|
||||
#endif
|
||||
|
||||
bool is_connected() const;
|
||||
|
||||
@@ -181,12 +198,23 @@ class APIServer : public Component, public Controller {
|
||||
#ifdef USE_API_SERVICES
|
||||
std::vector<UserServiceDescriptor *> user_services_;
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
struct PendingActionResponse {
|
||||
uint32_t call_id;
|
||||
ActionResponseCallback callback;
|
||||
};
|
||||
std::vector<PendingActionResponse> action_response_callbacks_;
|
||||
#endif
|
||||
|
||||
// Group smaller types together
|
||||
uint16_t port_{6053};
|
||||
uint16_t batch_delay_{100};
|
||||
// Connection limits - these defaults will be overridden by config values
|
||||
// from cv.SplitDefault in __init__.py which sets platform-specific defaults
|
||||
uint8_t listen_backlog_{4};
|
||||
uint8_t max_connections_{8};
|
||||
bool shutting_down_ = false;
|
||||
// 5 bytes used, 3 bytes padding
|
||||
// 7 bytes used, 1 byte padding
|
||||
|
||||
#ifdef USE_API_NOISE
|
||||
std::shared_ptr<APINoiseContext> noise_ctx_ = std::make_shared<APINoiseContext>();
|
||||
|
||||
@@ -179,9 +179,9 @@ class CustomAPIDevice {
|
||||
* @param service_name The service to call.
|
||||
*/
|
||||
void call_homeassistant_service(const std::string &service_name) {
|
||||
HomeassistantServiceResponse resp;
|
||||
HomeassistantActionRequest resp;
|
||||
resp.set_service(StringRef(service_name));
|
||||
global_api_server->send_homeassistant_service_call(resp);
|
||||
global_api_server->send_homeassistant_action(resp);
|
||||
}
|
||||
|
||||
/** Call a Home Assistant service from ESPHome.
|
||||
@@ -199,15 +199,15 @@ class CustomAPIDevice {
|
||||
* @param data The data for the service call, mapping from string to string.
|
||||
*/
|
||||
void call_homeassistant_service(const std::string &service_name, const std::map<std::string, std::string> &data) {
|
||||
HomeassistantServiceResponse resp;
|
||||
HomeassistantActionRequest resp;
|
||||
resp.set_service(StringRef(service_name));
|
||||
resp.data.init(data.size());
|
||||
for (auto &it : data) {
|
||||
resp.data.emplace_back();
|
||||
auto &kv = resp.data.back();
|
||||
auto &kv = resp.data.emplace_back();
|
||||
kv.set_key(StringRef(it.first));
|
||||
kv.value = it.second;
|
||||
}
|
||||
global_api_server->send_homeassistant_service_call(resp);
|
||||
global_api_server->send_homeassistant_action(resp);
|
||||
}
|
||||
|
||||
/** Fire an ESPHome event in Home Assistant.
|
||||
@@ -221,10 +221,10 @@ class CustomAPIDevice {
|
||||
* @param event_name The event to fire.
|
||||
*/
|
||||
void fire_homeassistant_event(const std::string &event_name) {
|
||||
HomeassistantServiceResponse resp;
|
||||
HomeassistantActionRequest resp;
|
||||
resp.set_service(StringRef(event_name));
|
||||
resp.is_event = true;
|
||||
global_api_server->send_homeassistant_service_call(resp);
|
||||
global_api_server->send_homeassistant_action(resp);
|
||||
}
|
||||
|
||||
/** Fire an ESPHome event in Home Assistant.
|
||||
@@ -241,16 +241,16 @@ class CustomAPIDevice {
|
||||
* @param data The data for the event, mapping from string to string.
|
||||
*/
|
||||
void fire_homeassistant_event(const std::string &service_name, const std::map<std::string, std::string> &data) {
|
||||
HomeassistantServiceResponse resp;
|
||||
HomeassistantActionRequest resp;
|
||||
resp.set_service(StringRef(service_name));
|
||||
resp.is_event = true;
|
||||
resp.data.init(data.size());
|
||||
for (auto &it : data) {
|
||||
resp.data.emplace_back();
|
||||
auto &kv = resp.data.back();
|
||||
auto &kv = resp.data.emplace_back();
|
||||
kv.set_key(StringRef(it.first));
|
||||
kv.value = it.second;
|
||||
}
|
||||
global_api_server->send_homeassistant_service_call(resp);
|
||||
global_api_server->send_homeassistant_action(resp);
|
||||
}
|
||||
#else
|
||||
template<typename T = void> void call_homeassistant_service(const std::string &service_name) {
|
||||
|
||||
@@ -3,10 +3,15 @@
|
||||
#include "api_server.h"
|
||||
#ifdef USE_API
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
#include <functional>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
#include "api_pb2.h"
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
#include "esphome/components/json/json_util.h"
|
||||
#endif
|
||||
#include "esphome/core/automation.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include <vector>
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
@@ -36,66 +41,191 @@ template<typename... X> class TemplatableStringValue : public TemplatableValue<s
|
||||
|
||||
template<typename... Ts> class TemplatableKeyValuePair {
|
||||
public:
|
||||
// Default constructor needed for FixedVector::emplace_back()
|
||||
TemplatableKeyValuePair() = default;
|
||||
|
||||
// Keys are always string literals from YAML dictionary keys (e.g., "code", "event")
|
||||
// and never templatable values or lambdas. Only the value parameter can be a lambda/template.
|
||||
// Using pass-by-value with std::move allows optimal performance for both lvalues and rvalues.
|
||||
template<typename T> TemplatableKeyValuePair(std::string key, T value) : key(std::move(key)), value(value) {}
|
||||
|
||||
std::string key;
|
||||
TemplatableStringValue<Ts...> value;
|
||||
};
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
// Represents the response data from a Home Assistant action
class ActionResponse {
 public:
  ActionResponse(bool success, std::string error_message = "")
      : success_(success), error_message_(std::move(error_message)) {}

#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
  ActionResponse(bool success, std::string error_message, const uint8_t *data, size_t data_len)
      : success_(success), error_message_(std::move(error_message)) {
    if (data == nullptr || data_len == 0)
      return;
    this->json_document_ = json::parse_json(data, data_len);
  }
#endif

  bool is_success() const { return this->success_; }
  const std::string &get_error_message() const { return this->error_message_; }

#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
  // Get data as parsed JSON object (const version returns read-only view)
  JsonObjectConst get_json() const { return this->json_document_.as<JsonObjectConst>(); }
#endif

 protected:
  bool success_;
  std::string error_message_;
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
  JsonDocument json_document_;
#endif
};

// Callback type for action responses
template<typename... Ts> using ActionResponseCallback = std::function<void(const ActionResponse &, Ts...)>;
#endif
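To make the two constructors above concrete, here is a hedged sketch of building and inspecting an ActionResponse; the JSON path only compiles when USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON is defined, and the payload and field name are invented for illustration:

void example_action_response_usage() {
  static const char RESPONSE_JSON[] = "{\"brightness\": 128}";
  // Successful response carrying a JSON document parsed from the raw payload
  ActionResponse ok(true, "", reinterpret_cast<const uint8_t *>(RESPONSE_JSON), sizeof(RESPONSE_JSON) - 1);
  if (ok.is_success()) {
    JsonObjectConst root = ok.get_json();
    int brightness = root["brightness"].as<int>();  // ArduinoJson-style read; 128 here
    (void) brightness;
  }

  // Failed response carries only an error message
  ActionResponse failed(false, "entity not found");
  ESP_LOGW("example", "error: %s", failed.get_error_message().c_str());
}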
|
||||
|
||||
template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts...> {
|
||||
public:
|
||||
explicit HomeAssistantServiceCallAction(APIServer *parent, bool is_event) : parent_(parent), is_event_(is_event) {}
|
||||
explicit HomeAssistantServiceCallAction(APIServer *parent, bool is_event) : parent_(parent) {
|
||||
this->flags_.is_event = is_event;
|
||||
}
|
||||
|
||||
template<typename T> void set_service(T service) { this->service_ = service; }
|
||||
|
||||
// Initialize FixedVector members - called from Python codegen with compile-time known sizes.
|
||||
// Must be called before any add_* methods; capacity must match the number of subsequent add_* calls.
|
||||
void init_data(size_t count) { this->data_.init(count); }
|
||||
void init_data_template(size_t count) { this->data_template_.init(count); }
|
||||
void init_variables(size_t count) { this->variables_.init(count); }
|
||||
|
||||
// Keys are always string literals from the Python code generation (e.g., cg.add(var.add_data("tag_id", templ))).
|
||||
// The value parameter can be a lambda/template, but keys are never templatable.
|
||||
// Using pass-by-value allows the compiler to optimize for both lvalues and rvalues.
|
||||
template<typename T> void add_data(std::string key, T value) { this->data_.emplace_back(std::move(key), value); }
|
||||
template<typename T> void add_data_template(std::string key, T value) {
|
||||
this->data_template_.emplace_back(std::move(key), value);
|
||||
template<typename K, typename V> void add_data(K &&key, V &&value) {
|
||||
this->add_kv_(this->data_, std::forward<K>(key), std::forward<V>(value));
|
||||
}
|
||||
template<typename T> void add_variable(std::string key, T value) {
|
||||
this->variables_.emplace_back(std::move(key), value);
|
||||
template<typename K, typename V> void add_data_template(K &&key, V &&value) {
|
||||
this->add_kv_(this->data_template_, std::forward<K>(key), std::forward<V>(value));
|
||||
}
|
||||
template<typename K, typename V> void add_variable(K &&key, V &&value) {
|
||||
this->add_kv_(this->variables_, std::forward<K>(key), std::forward<V>(value));
|
||||
}
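// A minimal usage sketch of the init-before-add contract described above (hypothetical
// values; api_server is a placeholder pointer and the real calls are emitted by the
// Python code generation):
//   auto *act = new HomeAssistantServiceCallAction<>(api_server, /*is_event=*/false);
//   act->set_service("light.turn_on");
//   act->init_data(2);                              // capacity == number of add_data() calls
//   act->add_data("entity_id", "light.kitchen");
//   act->add_data("brightness", "128");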
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
template<typename T> void set_response_template(T response_template) {
|
||||
this->response_template_ = response_template;
|
||||
this->flags_.has_response_template = true;
|
||||
}
|
||||
|
||||
void set_wants_status() { this->flags_.wants_status = true; }
|
||||
void set_wants_response() { this->flags_.wants_response = true; }
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
Trigger<JsonObjectConst, Ts...> *get_success_trigger_with_response() const {
|
||||
return this->success_trigger_with_response_;
|
||||
}
|
||||
#endif
|
||||
Trigger<Ts...> *get_success_trigger() const { return this->success_trigger_; }
|
||||
Trigger<std::string, Ts...> *get_error_trigger() const { return this->error_trigger_; }
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
|
||||
void play(Ts... x) override {
|
||||
HomeassistantServiceResponse resp;
|
||||
HomeassistantActionRequest resp;
|
||||
std::string service_value = this->service_.value(x...);
|
||||
resp.set_service(StringRef(service_value));
|
||||
resp.is_event = this->is_event_;
|
||||
for (auto &it : this->data_) {
|
||||
resp.data.emplace_back();
|
||||
auto &kv = resp.data.back();
|
||||
kv.set_key(StringRef(it.key));
|
||||
kv.value = it.value.value(x...);
|
||||
resp.is_event = this->flags_.is_event;
|
||||
this->populate_service_map(resp.data, this->data_, x...);
|
||||
this->populate_service_map(resp.data_template, this->data_template_, x...);
|
||||
this->populate_service_map(resp.variables, this->variables_, x...);
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
if (this->flags_.wants_status) {
|
||||
// Generate a unique call ID for this service call
|
||||
static uint32_t call_id_counter = 1;
|
||||
uint32_t call_id = call_id_counter++;
|
||||
resp.call_id = call_id;
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
if (this->flags_.wants_response) {
|
||||
resp.wants_response = true;
|
||||
// Set response template if provided
|
||||
if (this->flags_.has_response_template) {
|
||||
std::string response_template_value = this->response_template_.value(x...);
|
||||
resp.response_template = response_template_value;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
auto captured_args = std::make_tuple(x...);
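// Copy the action's arguments into a tuple so they outlive this call; std::apply
// below re-expands them when the response callback fires later.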
|
||||
this->parent_->register_action_response_callback(call_id, [this, captured_args](const ActionResponse &response) {
|
||||
std::apply(
|
||||
[this, &response](auto &&...args) {
|
||||
if (response.is_success()) {
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
if (this->flags_.wants_response) {
|
||||
this->success_trigger_with_response_->trigger(response.get_json(), args...);
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
this->success_trigger_->trigger(args...);
|
||||
}
|
||||
} else {
|
||||
this->error_trigger_->trigger(response.get_error_message(), args...);
|
||||
}
|
||||
},
|
||||
captured_args);
|
||||
});
|
||||
}
|
||||
for (auto &it : this->data_template_) {
|
||||
resp.data_template.emplace_back();
|
||||
auto &kv = resp.data_template.back();
|
||||
kv.set_key(StringRef(it.key));
|
||||
kv.value = it.value.value(x...);
|
||||
}
|
||||
for (auto &it : this->variables_) {
|
||||
resp.variables.emplace_back();
|
||||
auto &kv = resp.variables.back();
|
||||
kv.set_key(StringRef(it.key));
|
||||
kv.value = it.value.value(x...);
|
||||
}
|
||||
this->parent_->send_homeassistant_service_call(resp);
|
||||
#endif
|
||||
|
||||
this->parent_->send_homeassistant_action(resp);
|
||||
}
|
||||
|
||||
protected:
|
||||
// Helper to add key-value pairs to FixedVectors with perfect forwarding to avoid copies
|
||||
template<typename K, typename V> void add_kv_(FixedVector<TemplatableKeyValuePair<Ts...>> &vec, K &&key, V &&value) {
|
||||
auto &kv = vec.emplace_back();
|
||||
kv.key = std::forward<K>(key);
|
||||
kv.value = std::forward<V>(value);
|
||||
}
|
||||
|
||||
template<typename VectorType, typename SourceType>
|
||||
static void populate_service_map(VectorType &dest, SourceType &source, Ts... x) {
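// Evaluates each templatable value with the current action arguments and copies the
// result into the outgoing protobuf FixedVector, which is sized exactly once via init().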
|
||||
dest.init(source.size());
|
||||
for (auto &it : source) {
|
||||
auto &kv = dest.emplace_back();
|
||||
kv.set_key(StringRef(it.key));
|
||||
kv.value = it.value.value(x...);
|
||||
}
|
||||
}
|
||||
|
||||
APIServer *parent_;
|
||||
bool is_event_;
|
||||
TemplatableStringValue<Ts...> service_{};
|
||||
std::vector<TemplatableKeyValuePair<Ts...>> data_;
|
||||
std::vector<TemplatableKeyValuePair<Ts...>> data_template_;
|
||||
std::vector<TemplatableKeyValuePair<Ts...>> variables_;
|
||||
FixedVector<TemplatableKeyValuePair<Ts...>> data_;
|
||||
FixedVector<TemplatableKeyValuePair<Ts...>> data_template_;
|
||||
FixedVector<TemplatableKeyValuePair<Ts...>> variables_;
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
TemplatableStringValue<Ts...> response_template_{""};
|
||||
Trigger<JsonObjectConst, Ts...> *success_trigger_with_response_ = new Trigger<JsonObjectConst, Ts...>();
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
Trigger<Ts...> *success_trigger_ = new Trigger<Ts...>();
|
||||
Trigger<std::string, Ts...> *error_trigger_ = new Trigger<std::string, Ts...>();
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
|
||||
struct Flags {
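// Bit-packed so the four boolean options share a single byte instead of four separate bools.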
|
||||
uint8_t is_event : 1;
|
||||
uint8_t wants_status : 1;
|
||||
uint8_t wants_response : 1;
|
||||
uint8_t has_response_template : 1;
|
||||
uint8_t reserved : 5;
|
||||
} flags_{0};
|
||||
};
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
#endif
|
||||
#endif
|
||||
|
||||
@@ -7,6 +7,69 @@ namespace esphome::api {
|
||||
|
||||
static const char *const TAG = "api.proto";
|
||||
|
||||
uint32_t ProtoDecodableMessage::count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id) {
|
||||
uint32_t count = 0;
|
||||
const uint8_t *ptr = buffer;
|
||||
const uint8_t *end = buffer + length;
|
||||
|
||||
while (ptr < end) {
|
||||
uint32_t consumed;
|
||||
|
||||
// Parse field header (tag)
|
||||
auto res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
|
||||
if (!res.has_value()) {
|
||||
break; // Invalid data, stop counting
|
||||
}
|
||||
|
||||
uint32_t tag = res->as_uint32();
|
||||
uint32_t field_type = tag & WIRE_TYPE_MASK;
|
||||
uint32_t field_id = tag >> 3;
|
||||
ptr += consumed;
|
||||
|
||||
// Count if this is the target field
|
||||
if (field_id == target_field_id) {
|
||||
count++;
|
||||
}
|
||||
|
||||
// Skip field data based on wire type
|
||||
switch (field_type) {
|
||||
case WIRE_TYPE_VARINT: { // VarInt - parse and skip
|
||||
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
|
||||
if (!res.has_value()) {
|
||||
return count; // Invalid data, return what we have
|
||||
}
|
||||
ptr += consumed;
|
||||
break;
|
||||
}
|
||||
case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited - parse length and skip data
|
||||
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
|
||||
if (!res.has_value()) {
|
||||
return count;
|
||||
}
|
||||
uint32_t field_length = res->as_uint32();
|
||||
ptr += consumed;
|
||||
if (ptr + field_length > end) {
|
||||
return count; // Out of bounds
|
||||
}
|
||||
ptr += field_length;
|
||||
break;
|
||||
}
|
||||
case WIRE_TYPE_FIXED32: { // 32-bit - skip 4 bytes
|
||||
if (ptr + 4 > end) {
|
||||
return count;
|
||||
}
|
||||
ptr += 4;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
// Unknown wire type, can't continue
|
||||
return count;
|
||||
}
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
|
||||
const uint8_t *ptr = buffer;
|
||||
const uint8_t *end = buffer + length;
|
||||
@@ -22,12 +85,12 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
|
||||
}
|
||||
|
||||
uint32_t tag = res->as_uint32();
|
||||
uint32_t field_type = tag & 0b111;
|
||||
uint32_t field_type = tag & WIRE_TYPE_MASK;
|
||||
uint32_t field_id = tag >> 3;
|
||||
ptr += consumed;
|
||||
|
||||
switch (field_type) {
|
||||
case 0: { // VarInt
|
||||
case WIRE_TYPE_VARINT: { // VarInt
|
||||
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
|
||||
if (!res.has_value()) {
|
||||
ESP_LOGV(TAG, "Invalid VarInt at offset %ld", (long) (ptr - buffer));
|
||||
@@ -39,7 +102,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
|
||||
ptr += consumed;
|
||||
break;
|
||||
}
|
||||
case 2: { // Length-delimited
|
||||
case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited
|
||||
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
|
||||
if (!res.has_value()) {
|
||||
ESP_LOGV(TAG, "Invalid Length Delimited at offset %ld", (long) (ptr - buffer));
|
||||
@@ -57,7 +120,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
|
||||
ptr += field_length;
|
||||
break;
|
||||
}
|
||||
case 5: { // 32-bit
|
||||
case WIRE_TYPE_FIXED32: { // 32-bit
|
||||
if (ptr + 4 > end) {
|
||||
ESP_LOGV(TAG, "Out-of-bounds Fixed32-bit at offset %ld", (long) (ptr - buffer));
|
||||
return;
|
||||
|
||||
@@ -15,6 +15,13 @@
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
// Protocol Buffer wire type constants
|
||||
// See https://protobuf.dev/programming-guides/encoding/#structure
|
||||
constexpr uint8_t WIRE_TYPE_VARINT = 0; // int32, int64, uint32, uint64, sint32, sint64, bool, enum
|
||||
constexpr uint8_t WIRE_TYPE_LENGTH_DELIMITED = 2; // string, bytes, embedded messages, packed repeated fields
|
||||
constexpr uint8_t WIRE_TYPE_FIXED32 = 5; // fixed32, sfixed32, float
|
||||
constexpr uint8_t WIRE_TYPE_MASK = 0b111; // Mask to extract wire type from tag
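// Example: field 1 encoded as a varint has the single tag byte 0x08 ((1 << 3) | WIRE_TYPE_VARINT);
// decoding reverses this with field_id = tag >> 3 and wire_type = tag & WIRE_TYPE_MASK.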
|
||||
|
||||
// Helper functions for ZigZag encoding/decoding
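// ZigZag keeps small magnitudes small so they fit in short varints: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4.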
|
||||
inline constexpr uint32_t encode_zigzag32(int32_t value) {
|
||||
return (static_cast<uint32_t>(value) << 1) ^ (static_cast<uint32_t>(value >> 31));
|
||||
@@ -182,6 +189,10 @@ class ProtoLengthDelimited {
|
||||
explicit ProtoLengthDelimited(const uint8_t *value, size_t length) : value_(value), length_(length) {}
|
||||
std::string as_string() const { return std::string(reinterpret_cast<const char *>(this->value_), this->length_); }
|
||||
|
||||
// Direct access to raw data without string allocation
|
||||
const uint8_t *data() const { return this->value_; }
|
||||
size_t size() const { return this->length_; }
|
||||
|
||||
/**
|
||||
* Decode the length-delimited data into an existing ProtoDecodableMessage instance.
|
||||
*
|
||||
@@ -237,7 +248,7 @@ class ProtoWriteBuffer {
|
||||
* Following https://protobuf.dev/programming-guides/encoding/#structure
|
||||
*/
|
||||
void encode_field_raw(uint32_t field_id, uint32_t type) {
|
||||
uint32_t val = (field_id << 3) | (type & 0b111);
|
||||
uint32_t val = (field_id << 3) | (type & WIRE_TYPE_MASK);
|
||||
this->encode_varint_raw(val);
|
||||
}
|
||||
void encode_string(uint32_t field_id, const char *string, size_t len, bool force = false) {
|
||||
@@ -350,7 +361,18 @@ class ProtoMessage {
|
||||
// Base class for messages that support decoding
|
||||
class ProtoDecodableMessage : public ProtoMessage {
|
||||
public:
|
||||
void decode(const uint8_t *buffer, size_t length);
|
||||
virtual void decode(const uint8_t *buffer, size_t length);
|
||||
|
||||
/**
|
||||
* Count occurrences of a repeated field in a protobuf buffer.
|
||||
* This is a lightweight scan that only parses tags and skips field data.
|
||||
*
|
||||
* @param buffer Pointer to the protobuf buffer
|
||||
* @param length Length of the buffer in bytes
|
||||
* @param target_field_id The field ID to count
|
||||
* @return Number of times the field appears in the buffer
|
||||
*/
|
||||
static uint32_t count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id);
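// Hypothetical sizing pass (field number 2 is a placeholder): count first, then
// allocate the FixedVector once before the full decode:
//   msg.args.init(ProtoDecodableMessage::count_repeated_field(buffer, length, 2));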
|
||||
|
||||
protected:
|
||||
virtual bool decode_varint(uint32_t field_id, ProtoVarInt value) { return false; }
|
||||
@@ -478,7 +500,7 @@ class ProtoSize {
|
||||
* @return The number of bytes needed to encode the field ID and wire type
|
||||
*/
|
||||
static constexpr uint32_t field(uint32_t field_id, uint32_t type) {
|
||||
uint32_t tag = (field_id << 3) | (type & 0b111);
|
||||
uint32_t tag = (field_id << 3) | (type & WIRE_TYPE_MASK);
|
||||
return varint(tag);
|
||||
}
|
||||
|
||||
@@ -745,13 +767,29 @@ class ProtoSize {
|
||||
template<typename MessageType>
|
||||
inline void add_repeated_message(uint32_t field_id_size, const std::vector<MessageType> &messages) {
|
||||
// Skip if the vector is empty
|
||||
if (messages.empty()) {
|
||||
return;
|
||||
if (!messages.empty()) {
|
||||
// Use the force version for all messages in the repeated field
|
||||
for (const auto &message : messages) {
|
||||
add_message_object_force(field_id_size, message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Use the force version for all messages in the repeated field
|
||||
for (const auto &message : messages) {
|
||||
add_message_object_force(field_id_size, message);
|
||||
/**
|
||||
* @brief Calculates and adds the sizes of all messages in a repeated field to the total message size (FixedVector
|
||||
* version)
|
||||
*
|
||||
* @tparam MessageType The type of the nested messages in the FixedVector
|
||||
* @param messages FixedVector of message objects
|
||||
*/
|
||||
template<typename MessageType>
|
||||
inline void add_repeated_message(uint32_t field_id_size, const FixedVector<MessageType> &messages) {
|
||||
// Skip if the fixed vector is empty
|
||||
if (!messages.empty()) {
|
||||
// Use the force version for all messages in the repeated field
|
||||
for (const auto &message : messages) {
|
||||
add_message_object_force(field_id_size, message);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -827,7 +865,7 @@ class ProtoService {
|
||||
}
|
||||
|
||||
// Authentication helper methods
|
||||
bool check_connection_setup_() {
|
||||
inline bool check_connection_setup_() {
|
||||
if (!this->is_connection_setup()) {
|
||||
this->on_no_setup_connection();
|
||||
return false;
|
||||
@@ -835,7 +873,7 @@ class ProtoService {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool check_authenticated_() {
|
||||
inline bool check_authenticated_() {
|
||||
#ifdef USE_API_PASSWORD
|
||||
if (!this->check_connection_setup_()) {
|
||||
return false;
|
||||
|
||||
@@ -12,16 +12,16 @@ template<> int32_t get_execute_arg_value<int32_t>(const ExecuteServiceArgument &
|
||||
template<> float get_execute_arg_value<float>(const ExecuteServiceArgument &arg) { return arg.float_; }
|
||||
template<> std::string get_execute_arg_value<std::string>(const ExecuteServiceArgument &arg) { return arg.string_; }
|
||||
template<> std::vector<bool> get_execute_arg_value<std::vector<bool>>(const ExecuteServiceArgument &arg) {
|
||||
return arg.bool_array;
|
||||
return std::vector<bool>(arg.bool_array.begin(), arg.bool_array.end());
|
||||
}
|
||||
template<> std::vector<int32_t> get_execute_arg_value<std::vector<int32_t>>(const ExecuteServiceArgument &arg) {
|
||||
return arg.int_array;
|
||||
return std::vector<int32_t>(arg.int_array.begin(), arg.int_array.end());
|
||||
}
|
||||
template<> std::vector<float> get_execute_arg_value<std::vector<float>>(const ExecuteServiceArgument &arg) {
|
||||
return arg.float_array;
|
||||
return std::vector<float>(arg.float_array.begin(), arg.float_array.end());
|
||||
}
|
||||
template<> std::vector<std::string> get_execute_arg_value<std::vector<std::string>>(const ExecuteServiceArgument &arg) {
|
||||
return arg.string_array;
|
||||
return std::vector<std::string>(arg.string_array.begin(), arg.string_array.end());
|
||||
}
|
||||
|
||||
template<> enums::ServiceArgType to_service_arg_type<bool>() { return enums::SERVICE_ARG_TYPE_BOOL; }
|
||||
|
||||
@@ -35,9 +35,9 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor {
|
||||
msg.set_name(StringRef(this->name_));
|
||||
msg.key = this->key_;
|
||||
std::array<enums::ServiceArgType, sizeof...(Ts)> arg_types = {to_service_arg_type<Ts>()...};
|
||||
for (int i = 0; i < sizeof...(Ts); i++) {
|
||||
msg.args.emplace_back();
|
||||
auto &arg = msg.args.back();
|
||||
msg.args.init(sizeof...(Ts));
|
||||
for (size_t i = 0; i < sizeof...(Ts); i++) {
|
||||
auto &arg = msg.args.emplace_back();
|
||||
arg.type = arg_types[i];
|
||||
arg.set_name(StringRef(this->arg_names_[i]));
|
||||
}
|
||||
@@ -55,7 +55,7 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor {
|
||||
|
||||
protected:
|
||||
virtual void execute(Ts... x) = 0;
|
||||
template<int... S> void execute_(std::vector<ExecuteServiceArgument> args, seq<S...> type) {
|
||||
template<typename ArgsContainer, int... S> void execute_(const ArgsContainer &args, seq<S...> type) {
|
||||
this->execute((get_execute_arg_value<Ts>(args[S]))...);
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import esphome.codegen as cg
|
||||
from esphome.components import i2c, sensor
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_CLEAR,
|
||||
CONF_GAIN,
|
||||
CONF_ID,
|
||||
DEVICE_CLASS_ILLUMINANCE,
|
||||
@@ -29,7 +30,6 @@ CONF_F5 = "f5"
|
||||
CONF_F6 = "f6"
|
||||
CONF_F7 = "f7"
|
||||
CONF_F8 = "f8"
|
||||
CONF_CLEAR = "clear"
|
||||
CONF_NIR = "nir"
|
||||
|
||||
UNIT_COUNTS = "#"
|
||||
|
||||
@@ -165,4 +165,4 @@ def final_validate_audio_schema(
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
cg.add_library("esphome/esp-audio-libs", "1.1.4")
|
||||
cg.add_library("esphome/esp-audio-libs", "2.0.1")
|
||||
|
||||
@@ -57,7 +57,7 @@ const char *audio_file_type_to_string(AudioFileType file_type) {
|
||||
void scale_audio_samples(const int16_t *audio_samples, int16_t *output_buffer, int16_t scale_factor,
|
||||
size_t samples_to_scale) {
|
||||
// Note the assembly dsps_mulc function has audio glitches if the input and output buffers are the same.
|
||||
for (int i = 0; i < samples_to_scale; i++) {
|
||||
for (size_t i = 0; i < samples_to_scale; i++) {
|
||||
int32_t acc = (int32_t) audio_samples[i] * (int32_t) scale_factor;
|
||||
output_buffer[i] = (int16_t) (acc >> 15);
|
||||
}
|
||||
|
||||
@@ -229,18 +229,18 @@ FileDecoderState AudioDecoder::decode_flac_() {
|
||||
auto result = this->flac_decoder_->read_header(this->input_transfer_buffer_->get_buffer_start(),
|
||||
this->input_transfer_buffer_->available());
|
||||
|
||||
if (result == esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
|
||||
return FileDecoderState::POTENTIALLY_FAILED;
|
||||
}
|
||||
|
||||
if (result != esp_audio_libs::flac::FLAC_DECODER_SUCCESS) {
|
||||
// Couldn't read FLAC header
|
||||
if (result > esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
|
||||
// Serious error reading FLAC header; there is no recovery
|
||||
return FileDecoderState::FAILED;
|
||||
}
|
||||
|
||||
size_t bytes_consumed = this->flac_decoder_->get_bytes_index();
|
||||
this->input_transfer_buffer_->decrease_buffer_length(bytes_consumed);
|
||||
|
||||
if (result == esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
|
||||
return FileDecoderState::MORE_TO_PROCESS;
|
||||
}
|
||||
|
||||
// Reallocate the output transfer buffer to the smallest necessary size
|
||||
this->free_buffer_required_ = flac_decoder_->get_output_buffer_size_bytes();
|
||||
if (!this->output_transfer_buffer_->reallocate(this->free_buffer_required_)) {
|
||||
@@ -256,9 +256,9 @@ FileDecoderState AudioDecoder::decode_flac_() {
|
||||
}
|
||||
|
||||
uint32_t output_samples = 0;
|
||||
auto result = this->flac_decoder_->decode_frame(
|
||||
this->input_transfer_buffer_->get_buffer_start(), this->input_transfer_buffer_->available(),
|
||||
reinterpret_cast<int16_t *>(this->output_transfer_buffer_->get_buffer_end()), &output_samples);
|
||||
auto result = this->flac_decoder_->decode_frame(this->input_transfer_buffer_->get_buffer_start(),
|
||||
this->input_transfer_buffer_->available(),
|
||||
this->output_transfer_buffer_->get_buffer_end(), &output_samples);
|
||||
|
||||
if (result == esp_audio_libs::flac::FLAC_DECODER_ERROR_OUT_OF_DATA) {
|
||||
// Not an issue, just needs more data that we'll get next time.
|
||||
|
||||
0
esphome/components/bh1900nux/__init__.py
Normal file
54
esphome/components/bh1900nux/bh1900nux.cpp
Normal file
@@ -0,0 +1,54 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "bh1900nux.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace bh1900nux {
|
||||
|
||||
static const char *const TAG = "bh1900nux.sensor";
|
||||
|
||||
// I2C Registers
|
||||
static const uint8_t TEMPERATURE_REG = 0x00;
|
||||
static const uint8_t CONFIG_REG = 0x01;  // Not used or supported yet
|
||||
static const uint8_t TEMPERATURE_LOW_REG = 0x02;  // Not used or supported yet
|
||||
static const uint8_t TEMPERATURE_HIGH_REG = 0x03;  // Not used or supported yet
|
||||
static const uint8_t SOFT_RESET_REG = 0x04;
|
||||
|
||||
// I2C Command payloads
|
||||
static const uint8_t SOFT_RESET_PAYLOAD = 0x01; // Soft Reset value
|
||||
|
||||
static const float SENSOR_RESOLUTION = 0.0625f; // Sensor resolution per bit in degrees celsius
|
||||
|
||||
void BH1900NUXSensor::setup() {
|
||||
// Initialize I2C device
|
||||
i2c::ErrorCode result_code =
|
||||
this->write_register(SOFT_RESET_REG, &SOFT_RESET_PAYLOAD, 1); // Software Reset to check communication
|
||||
if (result_code != i2c::ERROR_OK) {
|
||||
this->mark_failed(ESP_LOG_MSG_COMM_FAIL);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
void BH1900NUXSensor::update() {
|
||||
uint8_t temperature_raw[2];
|
||||
if (this->read_register(TEMPERATURE_REG, temperature_raw, 2) != i2c::ERROR_OK) {
|
||||
ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
|
||||
return;
|
||||
}
|
||||
|
||||
// Combined raw register value (16 bits, with the 12-bit temperature left-aligned in the upper bits)
|
||||
// Temperature is represented in just 12 bits, shift needed
|
||||
int16_t raw_temperature_register_value = encode_uint16(temperature_raw[0], temperature_raw[1]);
|
||||
raw_temperature_register_value >>= 4;
|
||||
float temperature_value = raw_temperature_register_value * SENSOR_RESOLUTION; // Apply sensor resolution
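// Example: raw bytes {0x19, 0x00} combine to 0x1900; 0x1900 >> 4 = 400; 400 * 0.0625 = 25.0 °C.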
|
||||
|
||||
this->publish_state(temperature_value);
|
||||
}
|
||||
|
||||
void BH1900NUXSensor::dump_config() {
|
||||
LOG_SENSOR("", "BH1900NUX", this);
|
||||
LOG_I2C_DEVICE(this);
|
||||
LOG_UPDATE_INTERVAL(this);
|
||||
}
|
||||
|
||||
} // namespace bh1900nux
|
||||
} // namespace esphome
|
||||
18
esphome/components/bh1900nux/bh1900nux.h
Normal file
@@ -0,0 +1,18 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace bh1900nux {
|
||||
|
||||
class BH1900NUXSensor : public sensor::Sensor, public PollingComponent, public i2c::I2CDevice {
|
||||
public:
|
||||
void setup() override;
|
||||
void update() override;
|
||||
void dump_config() override;
|
||||
};
|
||||
|
||||
} // namespace bh1900nux
|
||||
} // namespace esphome
|
||||
34
esphome/components/bh1900nux/sensor.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import i2c, sensor
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
STATE_CLASS_MEASUREMENT,
|
||||
UNIT_CELSIUS,
|
||||
)
|
||||
|
||||
DEPENDENCIES = ["i2c"]
|
||||
CODEOWNERS = ["@B48D81EFCC"]
|
||||
|
||||
sensor_ns = cg.esphome_ns.namespace("bh1900nux")
|
||||
BH1900NUXSensor = sensor_ns.class_(
|
||||
"BH1900NUXSensor", cg.PollingComponent, i2c.I2CDevice
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = (
|
||||
sensor.sensor_schema(
|
||||
BH1900NUXSensor,
|
||||
accuracy_decimals=1,
|
||||
unit_of_measurement=UNIT_CELSIUS,
|
||||
device_class=DEVICE_CLASS_TEMPERATURE,
|
||||
state_class=STATE_CLASS_MEASUREMENT,
|
||||
)
|
||||
.extend(cv.polling_component_schema("60s"))
|
||||
.extend(i2c.i2c_device_schema(0x48))
|
||||
)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
var = await sensor.new_sensor(config)
|
||||
await cg.register_component(var, config)
|
||||
await i2c.register_i2c_device(var, config)
|
||||
@@ -97,10 +97,10 @@ void BL0906::handle_actions_() {
|
||||
return;
|
||||
}
|
||||
ActionCallbackFuncPtr ptr_func = nullptr;
|
||||
for (int i = 0; i < this->action_queue_.size(); i++) {
|
||||
for (size_t i = 0; i < this->action_queue_.size(); i++) {
|
||||
ptr_func = this->action_queue_[i];
|
||||
if (ptr_func) {
|
||||
ESP_LOGI(TAG, "HandleActionCallback[%d]", i);
|
||||
ESP_LOGI(TAG, "HandleActionCallback[%zu]", i);
|
||||
(this->*ptr_func)();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@ void BL0942::loop() {
|
||||
if (!avail) {
|
||||
return;
|
||||
}
|
||||
if (avail < sizeof(buffer)) {
|
||||
if (static_cast<size_t>(avail) < sizeof(buffer)) {
|
||||
if (!this->rx_start_) {
|
||||
this->rx_start_ = millis();
|
||||
} else if (millis() > this->rx_start_ + PKT_TIMEOUT_MS) {
|
||||
@@ -148,7 +148,7 @@ void BL0942::setup() {
|
||||
|
||||
this->write_reg_(BL0942_REG_USR_WRPROT, 0);
|
||||
|
||||
if (this->read_reg_(BL0942_REG_MODE) != mode)
|
||||
if (static_cast<uint32_t>(this->read_reg_(BL0942_REG_MODE)) != mode)
|
||||
this->status_set_warning(LOG_STR("BL0942 setup failed!"));
|
||||
|
||||
this->flush();
|
||||
|
||||
@@ -116,7 +116,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
)
|
||||
.extend(cv.COMPONENT_SCHEMA)
|
||||
.extend(esp32_ble_tracker.ESP_BLE_DEVICE_SCHEMA),
|
||||
esp32_ble_tracker.consume_connection_slots(1, "ble_client"),
|
||||
esp32_ble.consume_connection_slots(1, "ble_client"),
|
||||
)
|
||||
|
||||
CONF_BLE_CLIENT_ID = "ble_client_id"
|
||||
|
||||
29
esphome/components/ble_nus/__init__.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components.zephyr import zephyr_add_prj_conf
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID, CONF_LOGS, CONF_TYPE
|
||||
|
||||
AUTO_LOAD = ["zephyr_ble_server"]
|
||||
CODEOWNERS = ["@tomaszduda23"]
|
||||
|
||||
ble_nus_ns = cg.esphome_ns.namespace("ble_nus")
|
||||
BLENUS = ble_nus_ns.class_("BLENUS", cg.Component)
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(BLENUS),
|
||||
cv.Optional(CONF_TYPE, default=CONF_LOGS): cv.one_of(
|
||||
*[CONF_LOGS], lower=True
|
||||
),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
cv.only_with_framework("zephyr"),
|
||||
)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
zephyr_add_prj_conf("BT_NUS", True)
|
||||
cg.add(var.set_expose_log(config[CONF_TYPE] == CONF_LOGS))
|
||||
await cg.register_component(var, config)
|
||||
157
esphome/components/ble_nus/ble_nus.cpp
Normal file
@@ -0,0 +1,157 @@
|
||||
#ifdef USE_ZEPHYR
|
||||
#include "ble_nus.h"
|
||||
#include <zephyr/kernel.h>
|
||||
#include <bluetooth/services/nus.h>
|
||||
#include "esphome/core/log.h"
|
||||
#ifdef USE_LOGGER
|
||||
#include "esphome/components/logger/logger.h"
|
||||
#include "esphome/core/application.h"
|
||||
#endif
|
||||
#include <zephyr/sys/ring_buffer.h>
|
||||
|
||||
namespace esphome::ble_nus {
|
||||
|
||||
constexpr size_t BLE_TX_BUF_SIZE = 2048;
|
||||
|
||||
// NOLINTBEGIN(cppcoreguidelines-avoid-non-const-global-variables)
|
||||
BLENUS *global_ble_nus;
|
||||
RING_BUF_DECLARE(global_ble_tx_ring_buf, BLE_TX_BUF_SIZE);
|
||||
// NOLINTEND(cppcoreguidelines-avoid-non-const-global-variables)
|
||||
|
||||
static const char *const TAG = "ble_nus";
|
||||
|
||||
size_t BLENUS::write_array(const uint8_t *data, size_t len) {
|
||||
if (atomic_get(&this->tx_status_) == TX_DISABLED) {
|
||||
return 0;
|
||||
}
|
||||
return ring_buf_put(&global_ble_tx_ring_buf, data, len);
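// ring_buf_put() queues at most len bytes; if the 2 KiB buffer is full it stores fewer
// and returns the number actually written, so callers may see a short write.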
|
||||
}
|
||||
|
||||
void BLENUS::connected(bt_conn *conn, uint8_t err) {
|
||||
if (err == 0) {
|
||||
global_ble_nus->conn_.store(bt_conn_ref(conn));
|
||||
}
|
||||
}
|
||||
|
||||
void BLENUS::disconnected(bt_conn *conn, uint8_t reason) {
|
||||
if (global_ble_nus->conn_) {
|
||||
bt_conn_unref(global_ble_nus->conn_.load());
|
||||
// Connection array is global static.
|
||||
// Reference can be kept even if disconnected.
|
||||
}
|
||||
}
|
||||
|
||||
void BLENUS::tx_callback(bt_conn *conn) {
|
||||
atomic_cas(&global_ble_nus->tx_status_, TX_BUSY, TX_ENABLED);
|
||||
ESP_LOGVV(TAG, "Sent operation completed");
|
||||
}
|
||||
|
||||
void BLENUS::send_enabled_callback(bt_nus_send_status status) {
|
||||
switch (status) {
|
||||
case BT_NUS_SEND_STATUS_ENABLED:
|
||||
atomic_set(&global_ble_nus->tx_status_, TX_ENABLED);
|
||||
#ifdef USE_LOGGER
|
||||
if (global_ble_nus->expose_log_) {
|
||||
App.schedule_dump_config();
|
||||
}
|
||||
#endif
|
||||
ESP_LOGD(TAG, "NUS notification has been enabled");
|
||||
break;
|
||||
case BT_NUS_SEND_STATUS_DISABLED:
|
||||
atomic_set(&global_ble_nus->tx_status_, TX_DISABLED);
|
||||
ESP_LOGD(TAG, "NUS notification has been disabled");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
void BLENUS::rx_callback(bt_conn *conn, const uint8_t *const data, uint16_t len) {
|
||||
ESP_LOGD(TAG, "Received %d bytes.", len);
|
||||
}
|
||||
|
||||
void BLENUS::setup() {
|
||||
bt_nus_cb callbacks = {
|
||||
.received = rx_callback,
|
||||
.sent = tx_callback,
|
||||
.send_enabled = send_enabled_callback,
|
||||
};
|
||||
|
||||
bt_nus_init(&callbacks);
|
||||
|
||||
static bt_conn_cb conn_callbacks = {
|
||||
.connected = BLENUS::connected,
|
||||
.disconnected = BLENUS::disconnected,
|
||||
};
|
||||
|
||||
bt_conn_cb_register(&conn_callbacks);
|
||||
|
||||
global_ble_nus = this;
|
||||
#ifdef USE_LOGGER
|
||||
if (logger::global_logger != nullptr && this->expose_log_) {
|
||||
logger::global_logger->add_on_log_callback(
|
||||
[this](int level, const char *tag, const char *message, size_t message_len) {
|
||||
this->write_array(reinterpret_cast<const uint8_t *>(message), message_len);
|
||||
const char c = '\n';
|
||||
this->write_array(reinterpret_cast<const uint8_t *>(&c), 1);
|
||||
});
|
||||
}
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
void BLENUS::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "ble nus:");
|
||||
ESP_LOGCONFIG(TAG, " log: %s", YESNO(this->expose_log_));
|
||||
uint32_t mtu = 0;
|
||||
bt_conn *conn = this->conn_.load();
|
||||
if (conn) {
|
||||
mtu = bt_nus_get_mtu(conn);
|
||||
}
|
||||
ESP_LOGCONFIG(TAG, " MTU: %u", mtu);
|
||||
}
|
||||
|
||||
void BLENUS::loop() {
|
||||
if (ring_buf_is_empty(&global_ble_tx_ring_buf)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!atomic_cas(&this->tx_status_, TX_ENABLED, TX_BUSY)) {
|
||||
if (atomic_get(&this->tx_status_) == TX_DISABLED) {
|
||||
ring_buf_reset(&global_ble_tx_ring_buf);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
bt_conn *conn = this->conn_.load();
|
||||
if (conn) {
|
||||
conn = bt_conn_ref(conn);
|
||||
}
|
||||
|
||||
if (nullptr == conn) {
|
||||
atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
|
||||
return;
|
||||
}
|
||||
|
||||
uint32_t req_len = bt_nus_get_mtu(conn);
|
||||
|
||||
uint8_t *buf;
|
||||
uint32_t size = ring_buf_get_claim(&global_ble_tx_ring_buf, &buf, req_len);
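// ring_buf_get_claim() hands back a contiguous region of at most req_len bytes;
// ring_buf_get_finish() below must then release exactly the number of bytes consumed.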
|
||||
|
||||
int err, err2;
|
||||
|
||||
err = bt_nus_send(conn, buf, size);
|
||||
err2 = ring_buf_get_finish(&global_ble_tx_ring_buf, size);
|
||||
if (err2) {
|
||||
// It should not happen.
|
||||
ESP_LOGE(TAG, "Size %u exceeds valid bytes in the ring buffer (%d error)", size, err2);
|
||||
}
|
||||
if (err == 0) {
|
||||
ESP_LOGVV(TAG, "Sent %d bytes", size);
|
||||
} else {
|
||||
ESP_LOGE(TAG, "Failed to send %d bytes (%d error)", size, err);
|
||||
atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED);
|
||||
}
|
||||
bt_conn_unref(conn);
|
||||
}
|
||||
|
||||
} // namespace esphome::ble_nus
|
||||
#endif
|
||||
37
esphome/components/ble_nus/ble_nus.h
Normal file
@@ -0,0 +1,37 @@
|
||||
#pragma once
|
||||
#ifdef USE_ZEPHYR
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/component.h"
|
||||
#include <shell/shell_bt_nus.h>
|
||||
#include <atomic>
|
||||
|
||||
namespace esphome::ble_nus {
|
||||
|
||||
class BLENUS : public Component {
|
||||
enum TxStatus {
|
||||
TX_DISABLED,
|
||||
TX_ENABLED,
|
||||
TX_BUSY,
|
||||
};
|
||||
|
||||
public:
|
||||
void setup() override;
|
||||
void dump_config() override;
|
||||
void loop() override;
|
||||
size_t write_array(const uint8_t *data, size_t len);
|
||||
void set_expose_log(bool expose_log) { this->expose_log_ = expose_log; }
|
||||
|
||||
protected:
|
||||
static void send_enabled_callback(bt_nus_send_status status);
|
||||
static void tx_callback(bt_conn *conn);
|
||||
static void rx_callback(bt_conn *conn, const uint8_t *data, uint16_t len);
|
||||
static void connected(bt_conn *conn, uint8_t err);
|
||||
static void disconnected(bt_conn *conn, uint8_t reason);
|
||||
|
||||
std::atomic<bt_conn *> conn_ = nullptr;
|
||||
bool expose_log_ = false;
|
||||
atomic_t tx_status_ = ATOMIC_INIT(TX_DISABLED);
|
||||
};
|
||||
|
||||
} // namespace esphome::ble_nus
|
||||
#endif
|
||||
@@ -6,8 +6,6 @@ from esphome.components.esp32 import add_idf_sdkconfig_option
|
||||
from esphome.components.esp32_ble import BTLoggers
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ACTIVE, CONF_ID
|
||||
from esphome.core import CORE
|
||||
from esphome.log import AnsiFore, color
|
||||
|
||||
AUTO_LOAD = ["esp32_ble_client", "esp32_ble_tracker"]
|
||||
DEPENDENCIES = ["api", "esp32"]
|
||||
@@ -44,29 +42,7 @@ def validate_connections(config):
|
||||
)
|
||||
elif config[CONF_ACTIVE]:
|
||||
connection_slots: int = config[CONF_CONNECTION_SLOTS]
|
||||
esp32_ble_tracker.consume_connection_slots(connection_slots, "bluetooth_proxy")(
|
||||
config
|
||||
)
|
||||
|
||||
# Warn about connection slot waste when using Arduino framework
|
||||
if CORE.using_arduino and connection_slots:
|
||||
_LOGGER.warning(
|
||||
"Bluetooth Proxy with active connections on Arduino framework has suboptimal performance.\n"
|
||||
"If BLE connections fail, they can waste connection slots for 10 seconds because\n"
|
||||
"Arduino doesn't allow configuring the BLE connection timeout (fixed at 30s).\n"
|
||||
"ESP-IDF framework allows setting it to 20s to match client timeouts.\n"
|
||||
"\n"
|
||||
"To switch to ESP-IDF, add this to your YAML:\n"
|
||||
" esp32:\n"
|
||||
" framework:\n"
|
||||
" type: esp-idf\n"
|
||||
"\n"
|
||||
"For detailed migration instructions, see:\n"
|
||||
"%s",
|
||||
color(
|
||||
AnsiFore.BLUE, "https://esphome.io/guides/esp32_arduino_to_idf.html"
|
||||
),
|
||||
)
|
||||
esp32_ble.consume_connection_slots(connection_slots, "bluetooth_proxy")(config)
|
||||
|
||||
return {
|
||||
**config,
|
||||
@@ -81,19 +57,17 @@ CONFIG_SCHEMA = cv.All(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(BluetoothProxy),
|
||||
cv.Optional(CONF_ACTIVE, default=True): cv.boolean,
|
||||
cv.SplitDefault(CONF_CACHE_SERVICES, esp32_idf=True): cv.All(
|
||||
cv.only_with_esp_idf, cv.boolean
|
||||
),
|
||||
cv.Optional(CONF_CACHE_SERVICES, default=True): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_CONNECTION_SLOTS,
|
||||
default=DEFAULT_CONNECTION_SLOTS,
|
||||
): cv.All(
|
||||
cv.positive_int,
|
||||
cv.Range(min=1, max=esp32_ble_tracker.max_connections()),
|
||||
cv.Range(min=1, max=esp32_ble.IDF_MAX_CONNECTIONS),
|
||||
),
|
||||
cv.Optional(CONF_CONNECTIONS): cv.All(
|
||||
cv.ensure_list(CONNECTION_SCHEMA),
|
||||
cv.Length(min=1, max=esp32_ble_tracker.max_connections()),
|
||||
cv.Length(min=1, max=esp32_ble.IDF_MAX_CONNECTIONS),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -230,8 +230,8 @@ void BluetoothConnection::send_service_for_discovery_() {
|
||||
service_resp.handle = service_result.start_handle;
|
||||
|
||||
if (total_char_count > 0) {
|
||||
// Reserve space and process characteristics
|
||||
service_resp.characteristics.reserve(total_char_count);
|
||||
// Initialize FixedVector with exact count and process characteristics
|
||||
service_resp.characteristics.init(total_char_count);
|
||||
uint16_t char_offset = 0;
|
||||
esp_gattc_char_elem_t char_result;
|
||||
while (true) { // characteristics
|
||||
@@ -253,9 +253,7 @@ void BluetoothConnection::send_service_for_discovery_() {
|
||||
|
||||
service_resp.characteristics.emplace_back();
|
||||
auto &characteristic_resp = service_resp.characteristics.back();
|
||||
|
||||
fill_gatt_uuid(characteristic_resp.uuid, characteristic_resp.short_uuid, char_result.uuid, use_efficient_uuids);
|
||||
|
||||
characteristic_resp.handle = char_result.char_handle;
|
||||
characteristic_resp.properties = char_result.properties;
|
||||
char_offset++;
|
||||
@@ -271,12 +269,11 @@ void BluetoothConnection::send_service_for_discovery_() {
|
||||
return;
|
||||
}
|
||||
if (total_desc_count == 0) {
|
||||
// No descriptors, continue to next characteristic
|
||||
continue;
|
||||
}
|
||||
|
||||
// Reserve space and process descriptors
|
||||
characteristic_resp.descriptors.reserve(total_desc_count);
|
||||
// Initialize FixedVector with exact count and process descriptors
|
||||
characteristic_resp.descriptors.init(total_desc_count);
|
||||
uint16_t desc_offset = 0;
|
||||
esp_gattc_descr_elem_t desc_result;
|
||||
while (true) { // descriptors
|
||||
@@ -297,9 +294,7 @@ void BluetoothConnection::send_service_for_discovery_() {
|
||||
|
||||
characteristic_resp.descriptors.emplace_back();
|
||||
auto &descriptor_resp = characteristic_resp.descriptors.back();
|
||||
|
||||
fill_gatt_uuid(descriptor_resp.uuid, descriptor_resp.short_uuid, desc_result.uuid, use_efficient_uuids);
|
||||
|
||||
descriptor_resp.handle = desc_result.handle;
|
||||
desc_offset++;
|
||||
}
|
||||
@@ -514,7 +509,8 @@ esp_err_t BluetoothConnection::read_characteristic(uint16_t handle) {
|
||||
return this->check_and_log_error_("esp_ble_gattc_read_char", err);
|
||||
}
|
||||
|
||||
esp_err_t BluetoothConnection::write_characteristic(uint16_t handle, const std::string &data, bool response) {
|
||||
esp_err_t BluetoothConnection::write_characteristic(uint16_t handle, const uint8_t *data, size_t length,
|
||||
bool response) {
|
||||
if (!this->connected()) {
|
||||
this->log_gatt_not_connected_("write", "characteristic");
|
||||
return ESP_GATT_NOT_CONNECTED;
|
||||
@@ -522,8 +518,11 @@ esp_err_t BluetoothConnection::write_characteristic(uint16_t handle, const std::
|
||||
ESP_LOGV(TAG, "[%d] [%s] Writing GATT characteristic handle %d", this->connection_index_, this->address_str_.c_str(),
|
||||
handle);
|
||||
|
||||
// ESP-IDF's API requires a non-const uint8_t* but it doesn't modify the data
|
||||
// The BTC layer immediately copies the data to its own buffer (see btc_gattc.c)
|
||||
// const_cast is safe here and was previously hidden by a C-style cast
|
||||
esp_err_t err =
|
||||
esp_ble_gattc_write_char(this->gattc_if_, this->conn_id_, handle, data.size(), (uint8_t *) data.data(),
|
||||
esp_ble_gattc_write_char(this->gattc_if_, this->conn_id_, handle, length, const_cast<uint8_t *>(data),
|
||||
response ? ESP_GATT_WRITE_TYPE_RSP : ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
return this->check_and_log_error_("esp_ble_gattc_write_char", err);
|
||||
}
|
||||
@@ -540,7 +539,7 @@ esp_err_t BluetoothConnection::read_descriptor(uint16_t handle) {
|
||||
return this->check_and_log_error_("esp_ble_gattc_read_char_descr", err);
|
||||
}
|
||||
|
||||
esp_err_t BluetoothConnection::write_descriptor(uint16_t handle, const std::string &data, bool response) {
|
||||
esp_err_t BluetoothConnection::write_descriptor(uint16_t handle, const uint8_t *data, size_t length, bool response) {
|
||||
if (!this->connected()) {
|
||||
this->log_gatt_not_connected_("write", "descriptor");
|
||||
return ESP_GATT_NOT_CONNECTED;
|
||||
@@ -548,8 +547,11 @@ esp_err_t BluetoothConnection::write_descriptor(uint16_t handle, const std::stri
|
||||
ESP_LOGV(TAG, "[%d] [%s] Writing GATT descriptor handle %d", this->connection_index_, this->address_str_.c_str(),
|
||||
handle);
|
||||
|
||||
// ESP-IDF's API requires a non-const uint8_t* but it doesn't modify the data
|
||||
// The BTC layer immediately copies the data to its own buffer (see btc_gattc.c)
|
||||
// const_cast is safe here and was previously hidden by a C-style cast
|
||||
esp_err_t err = esp_ble_gattc_write_char_descr(
|
||||
this->gattc_if_, this->conn_id_, handle, data.size(), (uint8_t *) data.data(),
|
||||
this->gattc_if_, this->conn_id_, handle, length, const_cast<uint8_t *>(data),
|
||||
response ? ESP_GATT_WRITE_TYPE_RSP : ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
return this->check_and_log_error_("esp_ble_gattc_write_char_descr", err);
|
||||
}
|
||||
|
||||
@@ -18,9 +18,9 @@ class BluetoothConnection final : public esp32_ble_client::BLEClientBase {
|
||||
esp32_ble_tracker::AdvertisementParserType get_advertisement_parser_type() override;
|
||||
|
||||
esp_err_t read_characteristic(uint16_t handle);
|
||||
esp_err_t write_characteristic(uint16_t handle, const std::string &data, bool response);
|
||||
esp_err_t write_characteristic(uint16_t handle, const uint8_t *data, size_t length, bool response);
|
||||
esp_err_t read_descriptor(uint16_t handle);
|
||||
esp_err_t write_descriptor(uint16_t handle, const std::string &data, bool response);
|
||||
esp_err_t write_descriptor(uint16_t handle, const uint8_t *data, size_t length, bool response);
|
||||
|
||||
esp_err_t notify_characteristic(uint16_t handle, bool enable);
|
||||
|
||||
|
||||
@@ -155,16 +155,12 @@ esp32_ble_tracker::AdvertisementParserType BluetoothProxy::get_advertisement_par
|
||||
BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool reserve) {
|
||||
for (uint8_t i = 0; i < this->connection_count_; i++) {
|
||||
auto *connection = this->connections_[i];
|
||||
if (connection->get_address() == address)
|
||||
uint64_t conn_addr = connection->get_address();
|
||||
|
||||
if (conn_addr == address)
|
||||
return connection;
|
||||
}
|
||||
|
||||
if (!reserve)
|
||||
return nullptr;
|
||||
|
||||
for (uint8_t i = 0; i < this->connection_count_; i++) {
|
||||
auto *connection = this->connections_[i];
|
||||
if (connection->get_address() == 0) {
|
||||
if (reserve && conn_addr == 0) {
|
||||
connection->send_service_ = INIT_SENDING_SERVICES;
|
||||
connection->set_address(address);
|
||||
// All connections must start at INIT
|
||||
@@ -175,7 +171,6 @@ BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool rese
|
||||
return connection;
|
||||
}
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
@@ -305,7 +300,7 @@ void BluetoothProxy::bluetooth_gatt_write(const api::BluetoothGATTWriteRequest &
|
||||
return;
|
||||
}
|
||||
|
||||
auto err = connection->write_characteristic(msg.handle, msg.data, msg.response);
|
||||
auto err = connection->write_characteristic(msg.handle, msg.data, msg.data_len, msg.response);
|
||||
if (err != ESP_OK) {
|
||||
this->send_gatt_error(msg.address, msg.handle, err);
|
||||
}
|
||||
@@ -331,7 +326,7 @@ void BluetoothProxy::bluetooth_gatt_write_descriptor(const api::BluetoothGATTWri
|
||||
return;
|
||||
}
|
||||
|
||||
auto err = connection->write_descriptor(msg.handle, msg.data, true);
|
||||
auto err = connection->write_descriptor(msg.handle, msg.data, msg.data_len, true);
|
||||
if (err != ESP_OK) {
|
||||
this->send_gatt_error(msg.address, msg.handle, err);
|
||||
}
|
||||
|
||||
@@ -16,7 +16,9 @@
|
||||
|
||||
#include "bluetooth_connection.h"
|
||||
|
||||
#ifndef CONFIG_ESP_HOSTED_ENABLE_BT_BLUEDROID
|
||||
#include <esp_bt.h>
|
||||
#endif
|
||||
#include <esp_bt_device.h>
|
||||
|
||||
namespace esphome::bluetooth_proxy {
|
||||
|
||||
@@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(BME680BSECComponent),
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
|
||||
cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
|
||||
IAQ_MODE_OPTIONS, upper=True
|
||||
),
|
||||
|
||||
@@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = (
|
||||
cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
|
||||
VOLTAGE_OPTIONS, upper=True
|
||||
),
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
|
||||
cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
|
||||
cv.Optional(
|
||||
CONF_STATE_SAVE_INTERVAL, default="6hours"
|
||||
): cv.positive_time_period_minutes,
|
||||
|
||||
@@ -2,7 +2,6 @@ import esphome.codegen as cg
|
||||
from esphome.components.esp32 import add_idf_component
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_BUFFER_SIZE, CONF_ID, CONF_TYPE
|
||||
from esphome.core import CORE
|
||||
from esphome.types import ConfigType
|
||||
|
||||
CODEOWNERS = ["@DT-art1"]
|
||||
@@ -51,9 +50,8 @@ async def to_code(config: ConfigType) -> None:
|
||||
buffer = cg.new_Pvariable(config[CONF_ENCODER_BUFFER_ID])
|
||||
cg.add(buffer.set_buffer_size(config[CONF_BUFFER_SIZE]))
|
||||
if config[CONF_TYPE] == ESP32_CAMERA_ENCODER:
|
||||
if CORE.using_esp_idf:
|
||||
add_idf_component(name="espressif/esp32-camera", ref="2.1.0")
|
||||
cg.add_build_flag("-DUSE_ESP32_CAMERA_JPEG_ENCODER")
|
||||
add_idf_component(name="espressif/esp32-camera", ref="2.1.1")
|
||||
cg.add_define("USE_ESP32_CAMERA_JPEG_ENCODER")
|
||||
var = cg.new_Pvariable(
|
||||
config[CONF_ID],
|
||||
config[CONF_QUALITY],
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#include "esphome/core/defines.h"
|
||||
|
||||
#ifdef USE_ESP32_CAMERA_JPEG_ENCODER
|
||||
|
||||
#include "esp32_camera_jpeg_encoder.h"
|
||||
@@ -15,7 +17,7 @@ camera::EncoderError ESP32CameraJPEGEncoder::encode_pixels(camera::CameraImageSp
|
||||
this->bytes_written_ = 0;
|
||||
this->out_of_output_memory_ = false;
|
||||
bool success = fmt2jpg_cb(pixels->get_data_buffer(), pixels->get_data_length(), spec->width, spec->height,
|
||||
to_internal_(spec->format), this->quality_, callback_, this);
|
||||
to_internal_(spec->format), this->quality_, callback, this);
|
||||
|
||||
if (!success)
|
||||
return camera::ENCODER_ERROR_CONFIGURATION;
|
||||
@@ -49,7 +51,7 @@ void ESP32CameraJPEGEncoder::dump_config() {
|
||||
this->output_->get_max_size(), this->quality_, this->buffer_expand_size_);
|
||||
}
|
||||
|
||||
size_t ESP32CameraJPEGEncoder::callback_(void *arg, size_t index, const void *data, size_t len) {
|
||||
size_t ESP32CameraJPEGEncoder::callback(void *arg, size_t index, const void *data, size_t len) {
|
||||
ESP32CameraJPEGEncoder *that = reinterpret_cast<ESP32CameraJPEGEncoder *>(arg);
|
||||
uint8_t *buffer = that->output_->get_data();
|
||||
size_t buffer_length = that->output_->get_max_size();
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/defines.h"
|
||||
|
||||
#ifdef USE_ESP32_CAMERA_JPEG_ENCODER
|
||||
|
||||
#include <esp_camera.h>
|
||||
@@ -24,7 +26,7 @@ class ESP32CameraJPEGEncoder : public camera::Encoder {
|
||||
void dump_config() override;
|
||||
// -------------------------
|
||||
protected:
|
||||
static size_t callback_(void *arg, size_t index, const void *data, size_t len);
|
||||
static size_t callback(void *arg, size_t index, const void *data, size_t len);
|
||||
pixformat_t to_internal_(camera::PixelFormat format);
|
||||
|
||||
camera::EncoderBuffer *output_{};
|
||||
|
||||
@@ -21,8 +21,8 @@ void Canbus::dump_config() {
|
||||
}
|
||||
}
|
||||
|
||||
void Canbus::send_data(uint32_t can_id, bool use_extended_id, bool remote_transmission_request,
|
||||
const std::vector<uint8_t> &data) {
|
||||
canbus::Error Canbus::send_data(uint32_t can_id, bool use_extended_id, bool remote_transmission_request,
|
||||
const std::vector<uint8_t> &data) {
|
||||
struct CanFrame can_message;
|
||||
|
||||
uint8_t size = static_cast<uint8_t>(data.size());
|
||||
@@ -45,13 +45,15 @@ void Canbus::send_data(uint32_t can_id, bool use_extended_id, bool remote_transm
|
||||
ESP_LOGVV(TAG, " data[%d]=%02x", i, can_message.data[i]);
|
||||
}
|
||||
|
||||
if (this->send_message(&can_message) != canbus::ERROR_OK) {
|
||||
canbus::Error error = this->send_message(&can_message);
|
||||
if (error != canbus::ERROR_OK) {
|
||||
if (use_extended_id) {
|
||||
ESP_LOGW(TAG, "send to extended id=0x%08" PRIx32 " failed!", can_id);
|
||||
ESP_LOGW(TAG, "send to extended id=0x%08" PRIx32 " failed with error %d!", can_id, error);
|
||||
} else {
|
||||
ESP_LOGW(TAG, "send to standard id=0x%03" PRIx32 " failed!", can_id);
|
||||
ESP_LOGW(TAG, "send to standard id=0x%03" PRIx32 " failed with error %d!", can_id, error);
|
||||
}
|
||||
}
|
||||
return error;
|
||||
}
|
||||
|
||||
void Canbus::add_trigger(CanbusTrigger *trigger) {
|
||||
|
||||
@@ -70,11 +70,11 @@ class Canbus : public Component {
|
||||
float get_setup_priority() const override { return setup_priority::HARDWARE; }
|
||||
void loop() override;
|
||||
|
||||
void send_data(uint32_t can_id, bool use_extended_id, bool remote_transmission_request,
|
||||
const std::vector<uint8_t> &data);
|
||||
void send_data(uint32_t can_id, bool use_extended_id, const std::vector<uint8_t> &data) {
|
||||
canbus::Error send_data(uint32_t can_id, bool use_extended_id, bool remote_transmission_request,
|
||||
const std::vector<uint8_t> &data);
|
||||
canbus::Error send_data(uint32_t can_id, bool use_extended_id, const std::vector<uint8_t> &data) {
|
||||
// for backwards compatibility only
|
||||
this->send_data(can_id, use_extended_id, false, data);
|
||||
return this->send_data(can_id, use_extended_id, false, data);
|
||||
}
|
||||
void set_can_id(uint32_t can_id) { this->can_id_ = can_id; }
|
||||
void set_use_extended_id(bool use_extended_id) { this->use_extended_id_ = use_extended_id; }
|
||||
@@ -105,9 +105,9 @@ class Canbus : public Component {
|
||||
CallbackManager<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)>
|
||||
callback_manager_{};
|
||||
|
||||
virtual bool setup_internal();
|
||||
virtual Error send_message(struct CanFrame *frame);
|
||||
virtual Error read_message(struct CanFrame *frame);
|
||||
virtual bool setup_internal() = 0;
|
||||
virtual Error send_message(struct CanFrame *frame) = 0;
|
||||
virtual Error read_message(struct CanFrame *frame) = 0;
|
||||
};
|
||||
|
||||
template<typename... Ts> class CanbusSendAction : public Action<Ts...>, public Parented<Canbus> {
|
||||
|
||||
@@ -8,17 +8,30 @@ namespace cap1188 {
|
||||
static const char *const TAG = "cap1188";
|
||||
|
||||
void CAP1188Component::setup() {
|
||||
// Reset device using the reset pin
|
||||
if (this->reset_pin_ != nullptr) {
|
||||
this->reset_pin_->setup();
|
||||
this->reset_pin_->digital_write(false);
|
||||
delay(100); // NOLINT
|
||||
this->reset_pin_->digital_write(true);
|
||||
delay(100); // NOLINT
|
||||
this->reset_pin_->digital_write(false);
|
||||
delay(100); // NOLINT
|
||||
this->disable_loop();
|
||||
|
||||
// no reset pin
|
||||
if (this->reset_pin_ == nullptr) {
|
||||
this->finish_setup_();
|
||||
return;
|
||||
}
|
||||
|
||||
// reset pin configured so reset before finishing setup
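// Chaining set_timeout() callbacks (instead of the blocking delay() calls removed above)
// keeps the main loop running during the roughly 300 ms reset sequence.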
|
||||
this->reset_pin_->setup();
|
||||
this->reset_pin_->digital_write(false);
|
||||
// delay after reset pin write
|
||||
this->set_timeout(100, [this]() {
|
||||
this->reset_pin_->digital_write(true);
|
||||
// delay after reset pin write
|
||||
this->set_timeout(100, [this]() {
|
||||
this->reset_pin_->digital_write(false);
|
||||
// delay after reset pin write
|
||||
this->set_timeout(100, [this]() { this->finish_setup_(); });
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
void CAP1188Component::finish_setup_() {
|
||||
// Check if CAP1188 is actually connected
|
||||
this->read_byte(CAP1188_PRODUCT_ID, &this->cap1188_product_id_);
|
||||
this->read_byte(CAP1188_MANUFACTURE_ID, &this->cap1188_manufacture_id_);
|
||||
@@ -44,6 +57,9 @@ void CAP1188Component::setup() {
|
||||
|
||||
// Speed up a bit
|
||||
this->write_byte(CAP1188_STAND_BY_CONFIGURATION, 0x30);
|
||||
|
||||
// Setup successful, so enable loop
|
||||
this->enable_loop();
|
||||
}
|
||||
|
||||
void CAP1188Component::dump_config() {
|
||||
|
||||
@@ -49,6 +49,8 @@ class CAP1188Component : public Component, public i2c::I2CDevice {
|
||||
void loop() override;
|
||||
|
||||
protected:
|
||||
void finish_setup_();
|
||||
|
||||
std::vector<CAP1188Channel *> channels_{};
|
||||
uint8_t touch_threshold_{0x20};
|
||||
uint8_t allow_multiple_touches_{0x80};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import web_server_base
|
||||
from esphome.components.web_server_base import CONF_WEB_SERVER_BASE_ID
|
||||
from esphome.config_helpers import filter_source_files_from_platform
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ID,
|
||||
@@ -9,11 +10,19 @@ from esphome.const import (
|
||||
PLATFORM_ESP8266,
|
||||
PLATFORM_LN882X,
|
||||
PLATFORM_RTL87XX,
|
||||
PlatformFramework,
|
||||
)
|
||||
from esphome.core import CORE, coroutine_with_priority
|
||||
from esphome.coroutine import CoroPriority
|
||||
|
||||
AUTO_LOAD = ["web_server_base", "ota.web_server"]
|
||||
|
||||
def AUTO_LOAD() -> list[str]:
|
||||
auto_load = ["web_server_base", "ota.web_server"]
|
||||
if CORE.using_esp_idf:
|
||||
auto_load.append("socket")
|
||||
return auto_load
|
||||
|
||||
|
||||
DEPENDENCIES = ["wifi"]
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
|
||||
@@ -58,3 +67,11 @@ async def to_code(config):
|
||||
cg.add_library("DNSServer", None)
|
||||
if CORE.is_libretiny:
|
||||
cg.add_library("DNSServer", None)
|
||||
|
||||
|
||||
# Only compile the ESP-IDF DNS server when using ESP-IDF framework
|
||||
FILTER_SOURCE_FILES = filter_source_files_from_platform(
|
||||
{
|
||||
"dns_server_esp32_idf.cpp": {PlatformFramework.ESP32_IDF},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -11,14 +11,14 @@ namespace captive_portal {
|
||||
static const char *const TAG = "captive_portal";
|
||||
|
||||
void CaptivePortal::handle_config(AsyncWebServerRequest *request) {
|
||||
AsyncResponseStream *stream = request->beginResponseStream(F("application/json"));
|
||||
stream->addHeader(F("cache-control"), F("public, max-age=0, must-revalidate"));
|
||||
AsyncResponseStream *stream = request->beginResponseStream(ESPHOME_F("application/json"));
|
||||
stream->addHeader(ESPHOME_F("cache-control"), ESPHOME_F("public, max-age=0, must-revalidate"));
|
||||
#ifdef USE_ESP8266
|
||||
stream->print(F("{\"mac\":\""));
|
||||
stream->print(ESPHOME_F("{\"mac\":\""));
|
||||
stream->print(get_mac_address_pretty().c_str());
|
||||
stream->print(F("\",\"name\":\""));
|
||||
stream->print(ESPHOME_F("\",\"name\":\""));
|
||||
stream->print(App.get_name().c_str());
|
||||
stream->print(F("\",\"aps\":[{}"));
|
||||
stream->print(ESPHOME_F("\",\"aps\":[{}"));
|
||||
#else
|
||||
stream->printf(R"({"mac":"%s","name":"%s","aps":[{})", get_mac_address_pretty().c_str(), App.get_name().c_str());
|
||||
#endif
|
||||
@@ -29,37 +29,35 @@ void CaptivePortal::handle_config(AsyncWebServerRequest *request) {
|
||||
|
||||
// Assumes no " in ssid; possible unicode issues?
|
||||
#ifdef USE_ESP8266
|
||||
stream->print(F(",{\"ssid\":\""));
|
||||
stream->print(ESPHOME_F(",{\"ssid\":\""));
|
||||
stream->print(scan.get_ssid().c_str());
|
||||
stream->print(F("\",\"rssi\":"));
|
||||
stream->print(ESPHOME_F("\",\"rssi\":"));
|
||||
stream->print(scan.get_rssi());
|
||||
stream->print(F(",\"lock\":"));
|
||||
stream->print(ESPHOME_F(",\"lock\":"));
|
||||
stream->print(scan.get_with_auth());
|
||||
stream->print(F("}"));
|
||||
stream->print(ESPHOME_F("}"));
|
||||
#else
|
||||
stream->printf(R"(,{"ssid":"%s","rssi":%d,"lock":%d})", scan.get_ssid().c_str(), scan.get_rssi(),
|
||||
scan.get_with_auth());
|
||||
#endif
|
||||
}
|
||||
stream->print(F("]}"));
|
||||
stream->print(ESPHOME_F("]}"));
|
||||
request->send(stream);
|
||||
}
|
||||
void CaptivePortal::handle_wifisave(AsyncWebServerRequest *request) {
|
||||
std::string ssid = request->arg("ssid").c_str();
|
||||
std::string psk = request->arg("psk").c_str();
|
||||
std::string ssid = request->arg("ssid").c_str(); // NOLINT(readability-redundant-string-cstr)
|
||||
std::string psk = request->arg("psk").c_str(); // NOLINT(readability-redundant-string-cstr)
|
||||
ESP_LOGI(TAG, "Requested WiFi Settings Change:");
|
||||
ESP_LOGI(TAG, " SSID='%s'", ssid.c_str());
|
||||
ESP_LOGI(TAG, " Password=" LOG_SECRET("'%s'"), psk.c_str());
|
||||
wifi::global_wifi_component->save_wifi_sta(ssid, psk);
|
||||
wifi::global_wifi_component->start_scanning();
|
||||
request->redirect(F("/?save"));
|
||||
request->redirect(ESPHOME_F("/?save"));
|
||||
}
|
||||
|
||||
void CaptivePortal::setup() {
|
||||
#ifndef USE_ARDUINO
|
||||
// No DNS server needed for non-Arduino frameworks
|
||||
// Disable loop by default - will be enabled when captive portal starts
|
||||
this->disable_loop();
|
||||
#endif
|
||||
}
|
||||
void CaptivePortal::start() {
|
||||
this->base_->init();
|
||||
@@ -67,51 +65,47 @@ void CaptivePortal::start() {
|
||||
this->base_->add_handler(this);
|
||||
}
|
||||
|
||||
network::IPAddress ip = wifi::global_wifi_component->wifi_soft_ap_ip();
|
||||
|
||||
#ifdef USE_ESP_IDF
|
||||
// Create DNS server instance for ESP-IDF
|
||||
this->dns_server_ = make_unique<DNSServer>();
|
||||
this->dns_server_->start(ip);
|
||||
#endif
|
||||
#ifdef USE_ARDUINO
|
||||
this->dns_server_ = make_unique<DNSServer>();
|
||||
this->dns_server_->setErrorReplyCode(DNSReplyCode::NoError);
|
||||
network::IPAddress ip = wifi::global_wifi_component->wifi_soft_ap_ip();
|
||||
this->dns_server_->start(53, F("*"), ip);
|
||||
// Re-enable loop() when DNS server is started
|
||||
this->enable_loop();
|
||||
this->dns_server_->start(53, ESPHOME_F("*"), ip);
|
||||
#endif
|
||||
|
||||
this->base_->get_server()->onNotFound([this](AsyncWebServerRequest *req) {
|
||||
if (!this->active_ || req->host().c_str() == wifi::global_wifi_component->wifi_soft_ap_ip().str()) {
|
||||
req->send(404, F("text/html"), F("File not found"));
|
||||
return;
|
||||
}
|
||||
|
||||
#ifdef USE_ESP8266
|
||||
String url = F("http://");
|
||||
url += wifi::global_wifi_component->wifi_soft_ap_ip().str().c_str();
|
||||
#else
|
||||
auto url = "http://" + wifi::global_wifi_component->wifi_soft_ap_ip().str();
|
||||
#endif
|
||||
req->redirect(url.c_str());
|
||||
});
|
||||
|
||||
this->initialized_ = true;
|
||||
this->active_ = true;
|
||||
|
||||
// Enable loop() now that captive portal is active
|
||||
this->enable_loop();
|
||||
|
||||
ESP_LOGV(TAG, "Captive portal started");
|
||||
}
|
||||
|
||||
void CaptivePortal::handleRequest(AsyncWebServerRequest *req) {
|
||||
if (req->url() == F("/")) {
|
||||
#ifndef USE_ESP8266
|
||||
auto *response = req->beginResponse(200, F("text/html"), INDEX_GZ, sizeof(INDEX_GZ));
|
||||
#else
|
||||
auto *response = req->beginResponse_P(200, F("text/html"), INDEX_GZ, sizeof(INDEX_GZ));
|
||||
#endif
|
||||
response->addHeader(F("Content-Encoding"), F("gzip"));
|
||||
req->send(response);
|
||||
return;
|
||||
} else if (req->url() == F("/config.json")) {
|
||||
if (req->url() == ESPHOME_F("/config.json")) {
|
||||
this->handle_config(req);
|
||||
return;
|
||||
} else if (req->url() == F("/wifisave")) {
|
||||
} else if (req->url() == ESPHOME_F("/wifisave")) {
|
||||
this->handle_wifisave(req);
|
||||
return;
|
||||
}
|
||||
|
||||
// All other requests get the captive portal page
|
||||
// This includes OS captive portal detection endpoints which will trigger
|
||||
// the captive portal when they don't receive their expected responses
|
||||
#ifndef USE_ESP8266
|
||||
auto *response = req->beginResponse(200, ESPHOME_F("text/html"), INDEX_GZ, sizeof(INDEX_GZ));
|
||||
#else
|
||||
auto *response = req->beginResponse_P(200, ESPHOME_F("text/html"), INDEX_GZ, sizeof(INDEX_GZ));
|
||||
#endif
|
||||
response->addHeader(ESPHOME_F("Content-Encoding"), ESPHOME_F("gzip"));
|
||||
req->send(response);
|
||||
}
|
||||
|
||||
CaptivePortal::CaptivePortal(web_server_base::WebServerBase *base) : base_(base) { global_captive_portal = this; }
|
||||
|
||||
@@ -5,6 +5,9 @@
#ifdef USE_ARDUINO
#include <DNSServer.h>
#endif
#ifdef USE_ESP_IDF
#include "dns_server_esp32_idf.h"
#endif
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include "esphome/core/preferences.h"
@@ -19,41 +22,36 @@ class CaptivePortal : public AsyncWebHandler, public Component {
  CaptivePortal(web_server_base::WebServerBase *base);
  void setup() override;
  void dump_config() override;
#ifdef USE_ARDUINO
  void loop() override {
#ifdef USE_ARDUINO
    if (this->dns_server_ != nullptr) {
      this->dns_server_->processNextRequest();
    } else {
      this->disable_loop();
    }
  }
#endif
#ifdef USE_ESP_IDF
    if (this->dns_server_ != nullptr) {
      this->dns_server_->process_next_request();
    }
#endif
  }
  float get_setup_priority() const override;
  void start();
  bool is_active() const { return this->active_; }
  void end() {
    this->active_ = false;
    this->disable_loop();  // Stop processing DNS requests
    this->base_->deinit();
#ifdef USE_ARDUINO
    this->dns_server_->stop();
    this->dns_server_ = nullptr;
#endif
    if (this->dns_server_ != nullptr) {
      this->dns_server_->stop();
      this->dns_server_ = nullptr;
    }
  }

  bool canHandle(AsyncWebServerRequest *request) const override {
    if (!this->active_)
      return false;

    if (request->method() == HTTP_GET) {
      if (request->url() == F("/"))
        return true;
      if (request->url() == F("/config.json"))
        return true;
      if (request->url() == F("/wifisave"))
        return true;
    }

    return false;
    // Handle all GET requests when captive portal is active
    // This allows us to respond with the portal page for any URL,
    // triggering OS captive portal detection
    return this->active_ && request->method() == HTTP_GET;
  }

  void handle_config(AsyncWebServerRequest *request);
@@ -66,7 +64,7 @@ class CaptivePortal : public AsyncWebHandler, public Component {
  web_server_base::WebServerBase *base_;
  bool initialized_{false};
  bool active_{false};
#ifdef USE_ARDUINO
#if defined(USE_ARDUINO) || defined(USE_ESP_IDF)
  std::unique_ptr<DNSServer> dns_server_{nullptr};
#endif
};

205  esphome/components/captive_portal/dns_server_esp32_idf.cpp  Normal file
@@ -0,0 +1,205 @@
#include "dns_server_esp32_idf.h"
#ifdef USE_ESP_IDF

#include "esphome/core/log.h"
#include "esphome/core/hal.h"
#include "esphome/components/socket/socket.h"
#include <lwip/sockets.h>
#include <lwip/inet.h>

namespace esphome::captive_portal {

static const char *const TAG = "captive_portal.dns";

// DNS constants
static constexpr uint16_t DNS_PORT = 53;
static constexpr uint16_t DNS_QR_FLAG = 1 << 15;
static constexpr uint16_t DNS_OPCODE_MASK = 0x7800;
static constexpr uint16_t DNS_QTYPE_A = 0x0001;
static constexpr uint16_t DNS_QCLASS_IN = 0x0001;
static constexpr uint16_t DNS_ANSWER_TTL = 300;

// DNS Header structure
struct DNSHeader {
  uint16_t id;
  uint16_t flags;
  uint16_t qd_count;
  uint16_t an_count;
  uint16_t ns_count;
  uint16_t ar_count;
} __attribute__((packed));

// DNS Question structure
struct DNSQuestion {
  uint16_t type;
  uint16_t dns_class;
} __attribute__((packed));

// DNS Answer structure
struct DNSAnswer {
  uint16_t ptr_offset;
  uint16_t type;
  uint16_t dns_class;
  uint32_t ttl;
  uint16_t addr_len;
  uint32_t ip_addr;
} __attribute__((packed));

void DNSServer::start(const network::IPAddress &ip) {
  this->server_ip_ = ip;
  ESP_LOGV(TAG, "Starting DNS server on %s", ip.str().c_str());

  // Create loop-monitored UDP socket
  this->socket_ = socket::socket_ip_loop_monitored(SOCK_DGRAM, IPPROTO_UDP);
  if (this->socket_ == nullptr) {
    ESP_LOGE(TAG, "Socket create failed");
    return;
  }

  // Set socket options
  int enable = 1;
  this->socket_->setsockopt(SOL_SOCKET, SO_REUSEADDR, &enable, sizeof(enable));

  // Bind to port 53
  struct sockaddr_storage server_addr = {};
  socklen_t addr_len = socket::set_sockaddr_any((struct sockaddr *) &server_addr, sizeof(server_addr), DNS_PORT);

  int err = this->socket_->bind((struct sockaddr *) &server_addr, addr_len);
  if (err != 0) {
    ESP_LOGE(TAG, "Bind failed: %d", errno);
    this->socket_ = nullptr;
    return;
  }
  ESP_LOGV(TAG, "Bound to port %d", DNS_PORT);
}

void DNSServer::stop() {
  if (this->socket_ != nullptr) {
    this->socket_->close();
    this->socket_ = nullptr;
  }
  ESP_LOGV(TAG, "Stopped");
}

void DNSServer::process_next_request() {
  // Process one request if socket is valid and data is available
  if (this->socket_ == nullptr || !this->socket_->ready()) {
    return;
  }
  struct sockaddr_in client_addr;
  socklen_t client_addr_len = sizeof(client_addr);

  // Receive DNS request using raw fd for recvfrom
  int fd = this->socket_->get_fd();
  if (fd < 0) {
    return;
  }

  ssize_t len = recvfrom(fd, this->buffer_, sizeof(this->buffer_), MSG_DONTWAIT, (struct sockaddr *) &client_addr,
                         &client_addr_len);

  if (len < 0) {
    if (errno != EAGAIN && errno != EWOULDBLOCK && errno != EINTR) {
      ESP_LOGE(TAG, "recvfrom failed: %d", errno);
    }
    return;
  }

  ESP_LOGVV(TAG, "Received %d bytes from %s:%d", len, inet_ntoa(client_addr.sin_addr), ntohs(client_addr.sin_port));

  if (len < static_cast<ssize_t>(sizeof(DNSHeader) + 1)) {
    ESP_LOGV(TAG, "Request too short: %d", len);
    return;
  }

  // Parse DNS header
  DNSHeader *header = (DNSHeader *) this->buffer_;
  uint16_t flags = ntohs(header->flags);
  uint16_t qd_count = ntohs(header->qd_count);

  // Check if it's a standard query
  if ((flags & DNS_QR_FLAG) || (flags & DNS_OPCODE_MASK) || qd_count != 1) {
    ESP_LOGV(TAG, "Not a standard query: flags=0x%04X, qd_count=%d", flags, qd_count);
    return;  // Not a standard query
  }

  // Parse domain name (we don't actually care about it - redirect everything)
  uint8_t *ptr = this->buffer_ + sizeof(DNSHeader);
  uint8_t *end = this->buffer_ + len;

  while (ptr < end && *ptr != 0) {
    uint8_t label_len = *ptr;
    if (label_len > 63) {  // Check for invalid label length
      return;
    }
    // Check if we have room for this label plus the length byte
    if (ptr + label_len + 1 > end) {
      return;  // Would overflow
    }
    ptr += label_len + 1;
  }

  // Check if we reached a proper null terminator
  if (ptr >= end || *ptr != 0) {
    return;  // Name not terminated or truncated
  }
  ptr++;  // Skip the null terminator

  // Check we have room for the question
  if (ptr + sizeof(DNSQuestion) > end) {
    return;  // Request truncated
  }

  // Parse DNS question
  DNSQuestion *question = (DNSQuestion *) ptr;
  uint16_t qtype = ntohs(question->type);
  uint16_t qclass = ntohs(question->dns_class);

  // We only handle A queries
  if (qtype != DNS_QTYPE_A || qclass != DNS_QCLASS_IN) {
    ESP_LOGV(TAG, "Not an A query: type=0x%04X, class=0x%04X", qtype, qclass);
    return;  // Not an A query
  }

  // Build DNS response by modifying the request in-place
  header->flags = htons(DNS_QR_FLAG | 0x8000);  // Response + Authoritative
  header->an_count = htons(1);  // One answer

  // Add answer section after the question
  size_t question_len = (ptr + sizeof(DNSQuestion)) - this->buffer_ - sizeof(DNSHeader);
  size_t answer_offset = sizeof(DNSHeader) + question_len;

  // Check if we have room for the answer
  if (answer_offset + sizeof(DNSAnswer) > sizeof(this->buffer_)) {
    ESP_LOGW(TAG, "Response too large");
    return;
  }

  DNSAnswer *answer = (DNSAnswer *) (this->buffer_ + answer_offset);

  // Pointer to name in question (offset from start of packet)
  answer->ptr_offset = htons(0xC000 | sizeof(DNSHeader));
  answer->type = htons(DNS_QTYPE_A);
  answer->dns_class = htons(DNS_QCLASS_IN);
  answer->ttl = htonl(DNS_ANSWER_TTL);
  answer->addr_len = htons(4);

  // Get the raw IP address
  ip4_addr_t addr = this->server_ip_;
  answer->ip_addr = addr.addr;

  size_t response_len = answer_offset + sizeof(DNSAnswer);

  // Send response
  ssize_t sent =
      this->socket_->sendto(this->buffer_, response_len, 0, (struct sockaddr *) &client_addr, client_addr_len);
  if (sent < 0) {
    ESP_LOGV(TAG, "Send failed: %d", errno);
  } else {
    ESP_LOGV(TAG, "Sent %d bytes", sent);
  }
}

} // namespace esphome::captive_portal

#endif // USE_ESP_IDF
27  esphome/components/captive_portal/dns_server_esp32_idf.h  Normal file
@@ -0,0 +1,27 @@
#pragma once
#ifdef USE_ESP_IDF

#include <memory>
#include "esphome/core/helpers.h"
#include "esphome/components/network/ip_address.h"
#include "esphome/components/socket/socket.h"

namespace esphome::captive_portal {

class DNSServer {
 public:
  void start(const network::IPAddress &ip);
  void stop();
  void process_next_request();

 protected:
  static constexpr size_t DNS_BUFFER_SIZE = 192;

  std::unique_ptr<socket::Socket> socket_{nullptr};
  network::IPAddress server_ip_;
  uint8_t buffer_[DNS_BUFFER_SIZE];
};

} // namespace esphome::captive_portal

#endif // USE_ESP_IDF
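For orientation, here is a condensed sketch of how the captive portal drives this class on ESP-IDF, pieced together only from the calls visible in the captive_portal.cpp/.h hunks above; the wrapper function shapes are illustrative, not additional changes in this PR:

// Illustrative only: lifecycle of the ESP-IDF DNSServer as used by CaptivePortal.
#ifdef USE_ESP_IDF
// start(): bind UDP port 53 on the soft-AP address and begin answering.
//   this->dns_server_ = make_unique<DNSServer>();
//   this->dns_server_->start(wifi::global_wifi_component->wifi_soft_ap_ip());
// loop(): poll the loop-monitored socket; at most one A query is answered per call.
//   if (this->dns_server_ != nullptr) this->dns_server_->process_next_request();
// end(): close the socket and release the server.
//   this->dns_server_->stop();
//   this->dns_server_ = nullptr;
#endif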
@@ -155,7 +155,7 @@ void CCS811Component::dump_config() {
  LOG_UPDATE_INTERVAL(this);
  LOG_SENSOR(" ", "CO2 Sensor", this->co2_);
  LOG_SENSOR(" ", "TVOC Sensor", this->tvoc_);
  LOG_TEXT_SENSOR(" ", "Firmware Version Sensor", this->version_)
  LOG_TEXT_SENSOR(" ", "Firmware Version Sensor", this->version_);
  if (this->baseline_) {
    ESP_LOGCONFIG(TAG, " Baseline: %04X", *this->baseline_);
  } else {

@@ -96,7 +96,8 @@ void ClimateCall::validate_() {
  }
  if (this->target_temperature_.has_value()) {
    auto target = *this->target_temperature_;
    if (traits.get_supports_two_point_target_temperature()) {
    if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                                 CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
      ESP_LOGW(TAG, " Cannot set target temperature for climate device "
               "with two-point target temperature!");
      this->target_temperature_.reset();
@@ -106,7 +107,8 @@ void ClimateCall::validate_() {
    }
  }
  if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) {
    if (!traits.get_supports_two_point_target_temperature()) {
    if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                                  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
      ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!");
      this->target_temperature_low_.reset();
      this->target_temperature_high_.reset();
@@ -350,13 +352,14 @@ void Climate::save_state_() {

  state.mode = this->mode;
  auto traits = this->get_traits();
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    state.target_temperature_low = this->target_temperature_low;
    state.target_temperature_high = this->target_temperature_high;
  } else {
    state.target_temperature = this->target_temperature;
  }
  if (traits.get_supports_target_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
    state.target_humidity = this->target_humidity;
  }
  if (traits.get_supports_fan_modes() && fan_mode.has_value()) {
@@ -367,9 +370,11 @@ void Climate::save_state_() {
    state.uses_custom_fan_mode = true;
    const auto &supported = traits.get_supported_custom_fan_modes();
    std::vector<std::string> vec{supported.begin(), supported.end()};
    auto it = std::find(vec.begin(), vec.end(), custom_fan_mode);
    if (it != vec.end()) {
      state.custom_fan_mode = std::distance(vec.begin(), it);
    for (size_t i = 0; i < vec.size(); i++) {
      if (vec[i] == custom_fan_mode) {
        state.custom_fan_mode = i;
        break;
      }
    }
  }
  if (traits.get_supports_presets() && preset.has_value()) {
@@ -380,10 +385,11 @@ void Climate::save_state_() {
    state.uses_custom_preset = true;
    const auto &supported = traits.get_supported_custom_presets();
    std::vector<std::string> vec{supported.begin(), supported.end()};
    auto it = std::find(vec.begin(), vec.end(), custom_preset);
    // only set custom preset if value exists, otherwise leave it as is
    if (it != vec.cend()) {
      state.custom_preset = std::distance(vec.begin(), it);
    for (size_t i = 0; i < vec.size(); i++) {
      if (vec[i] == custom_preset) {
        state.custom_preset = i;
        break;
      }
    }
  }
  if (traits.get_supports_swing_modes()) {
@@ -397,7 +403,7 @@ void Climate::publish_state() {
  auto traits = this->get_traits();

  ESP_LOGD(TAG, " Mode: %s", LOG_STR_ARG(climate_mode_to_string(this->mode)));
  if (traits.get_supports_action()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
    ESP_LOGD(TAG, " Action: %s", LOG_STR_ARG(climate_action_to_string(this->action)));
  }
  if (traits.get_supports_fan_modes() && this->fan_mode.has_value()) {
@@ -415,19 +421,20 @@ void Climate::publish_state() {
  if (traits.get_supports_swing_modes()) {
    ESP_LOGD(TAG, " Swing Mode: %s", LOG_STR_ARG(climate_swing_mode_to_string(this->swing_mode)));
  }
  if (traits.get_supports_current_temperature()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
    ESP_LOGD(TAG, " Current Temperature: %.2f°C", this->current_temperature);
  }
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    ESP_LOGD(TAG, " Target Temperature: Low: %.2f°C High: %.2f°C", this->target_temperature_low,
             this->target_temperature_high);
  } else {
    ESP_LOGD(TAG, " Target Temperature: %.2f°C", this->target_temperature);
  }
  if (traits.get_supports_current_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
    ESP_LOGD(TAG, " Current Humidity: %.0f%%", this->current_humidity);
  }
  if (traits.get_supports_target_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
    ESP_LOGD(TAG, " Target Humidity: %.0f%%", this->target_humidity);
  }

@@ -482,13 +489,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
  auto call = climate->make_call();
  auto traits = climate->get_traits();
  call.set_mode(this->mode);
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    call.set_target_temperature_low(this->target_temperature_low);
    call.set_target_temperature_high(this->target_temperature_high);
  } else {
    call.set_target_temperature(this->target_temperature);
  }
  if (traits.get_supports_target_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
    call.set_target_humidity(this->target_humidity);
  }
  if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) {
@@ -505,13 +513,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
void ClimateDeviceRestoreState::apply(Climate *climate) {
  auto traits = climate->get_traits();
  climate->mode = this->mode;
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    climate->target_temperature_low = this->target_temperature_low;
    climate->target_temperature_high = this->target_temperature_high;
  } else {
    climate->target_temperature = this->target_temperature;
  }
  if (traits.get_supports_target_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
    climate->target_humidity = this->target_humidity;
  }
  if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) {
@@ -577,28 +586,30 @@ void Climate::dump_traits_(const char *tag) {
                " Target: %.1f",
                traits.get_visual_min_temperature(), traits.get_visual_max_temperature(),
                traits.get_visual_target_temperature_step());
  if (traits.get_supports_current_temperature()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
    ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step());
  }
  if (traits.get_supports_target_humidity() || traits.get_supports_current_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY |
                               climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
    ESP_LOGCONFIG(tag,
                  " - Min humidity: %.0f\n"
                  " - Max humidity: %.0f",
                  traits.get_visual_min_humidity(), traits.get_visual_max_humidity());
  }
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature");
  }
  if (traits.get_supports_current_temperature()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
    ESP_LOGCONFIG(tag, " [x] Supports current temperature");
  }
  if (traits.get_supports_target_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
    ESP_LOGCONFIG(tag, " [x] Supports target humidity");
  }
  if (traits.get_supports_current_humidity()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
    ESP_LOGCONFIG(tag, " [x] Supports current humidity");
  }
  if (traits.get_supports_action()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
    ESP_LOGCONFIG(tag, " [x] Supports action");
  }
  if (!traits.get_supported_modes().empty()) {

@@ -98,6 +98,21 @@ enum ClimatePreset : uint8_t {
  CLIMATE_PRESET_ACTIVITY = 7,
};

enum ClimateFeature : uint32_t {
  // Reporting current temperature is supported
  CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0,
  // Setting two target temperatures is supported (used in conjunction with CLIMATE_MODE_HEAT_COOL)
  CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1,
  // Single-point mode is NOT supported (UI always displays two handles, setting 'target_temperature' is not supported)
  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1 << 2,
  // Reporting current humidity is supported
  CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3,
  // Setting a target humidity is supported
  CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1 << 4,
  // Reporting current climate action is supported
  CLIMATE_SUPPORTS_ACTION = 1 << 5,
};

/// Convert the given ClimateMode to a human-readable string.
const LogString *climate_mode_to_string(ClimateMode mode);

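Since the new ClimateFeature values are plain bit masks, a device can advertise several capabilities at once by OR-ing them into its traits. A minimal sketch, assuming a hypothetical MyClimate component (the component name and mode set are illustrative, not part of this diff):

// Hypothetical component, for illustration only.
climate::ClimateTraits MyClimate::traits() {
  climate::ClimateTraits traits;
  // Declare capabilities with the new bit flags instead of the deprecated set_supports_* setters.
  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE |
                           climate::CLIMATE_SUPPORTS_ACTION);
  traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT});
  return traits;
}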
@@ -1,8 +1,8 @@
#pragma once

#include "esphome/core/helpers.h"
#include "climate_mode.h"
#include <set>
#include "climate_mode.h"
#include "esphome/core/helpers.h"

namespace esphome {

@@ -21,91 +21,100 @@ namespace climate {
 * - Target Temperature
 *
 * All other properties and modes are optional and the integration must mark
 * each of them as supported by setting the appropriate flag here.
 * each of them as supported by setting the appropriate flag(s) here.
 *
 * - supports current temperature - if the climate device supports reporting a current temperature
 * - supports two point target temperature - if the climate device's target temperature should be
 *   split in target_temperature_low and target_temperature_high instead of just the single target_temperature
 * - feature flags: see ClimateFeatures enum in climate_mode.h
 * - supports modes:
 *   - auto mode (automatic control)
 *   - cool mode (lowers current temperature)
 *   - heat mode (increases current temperature)
 *   - dry mode (removes humidity from air)
 *   - fan mode (only turns on fan)
 * - supports action - if the climate device supports reporting the active
 *   current action of the device with the action property.
 * - supports fan modes - optionally, if it has a fan which can be configured in different ways:
 *   - on, off, auto, high, medium, low, middle, focus, diffuse, quiet
 * - supports swing modes - optionally, if it has a swing which can be configured in different ways:
 *   - off, both, vertical, horizontal
 *
 * This class also contains static data for the climate device display:
 * - visual min/max temperature - tells the frontend what range of temperatures the climate device
 *   should display (gauge min/max values)
 * - visual min/max temperature/humidity - tells the frontend what range of temperature/humidity the
 *   climate device should display (gauge min/max values)
 * - temperature step - the step with which to increase/decrease target temperature.
 *   This also affects with how many decimal places the temperature is shown
 */
class ClimateTraits {
 public:
  bool get_supports_current_temperature() const { return this->supports_current_temperature_; }
  /// Get/set feature flags (see ClimateFeatures enum in climate_mode.h)
  uint32_t get_feature_flags() const { return this->feature_flags_; }
  void add_feature_flags(uint32_t feature_flags) { this->feature_flags_ |= feature_flags; }
  void clear_feature_flags(uint32_t feature_flags) { this->feature_flags_ &= ~feature_flags; }
  bool has_feature_flags(uint32_t feature_flags) const { return this->feature_flags_ & feature_flags; }
  void set_feature_flags(uint32_t feature_flags) { this->feature_flags_ = feature_flags; }

  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
  bool get_supports_current_temperature() const {
    return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
  }
  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
  void set_supports_current_temperature(bool supports_current_temperature) {
    this->supports_current_temperature_ = supports_current_temperature;
    if (supports_current_temperature) {
      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
    } else {
      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
    }
  }
  bool get_supports_current_humidity() const { return this->supports_current_humidity_; }
  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
  bool get_supports_current_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY); }
  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
  void set_supports_current_humidity(bool supports_current_humidity) {
    this->supports_current_humidity_ = supports_current_humidity;
    if (supports_current_humidity) {
      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
    } else {
      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
    }
  }
  bool get_supports_two_point_target_temperature() const { return this->supports_two_point_target_temperature_; }
  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
  bool get_supports_two_point_target_temperature() const {
    return this->has_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
  }
  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
  void set_supports_two_point_target_temperature(bool supports_two_point_target_temperature) {
    this->supports_two_point_target_temperature_ = supports_two_point_target_temperature;
    if (supports_two_point_target_temperature)
    // Use CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE to mimic previous behavior
    {
      this->add_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
    } else {
      this->clear_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
    }
  }
  bool get_supports_target_humidity() const { return this->supports_target_humidity_; }
  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
  bool get_supports_target_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY); }
  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
  void set_supports_target_humidity(bool supports_target_humidity) {
    this->supports_target_humidity_ = supports_target_humidity;
    if (supports_target_humidity) {
      this->add_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
    } else {
      this->clear_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
    }
  }
  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
  bool get_supports_action() const { return this->has_feature_flags(CLIMATE_SUPPORTS_ACTION); }
  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
  void set_supports_action(bool supports_action) {
    if (supports_action) {
      this->add_feature_flags(CLIMATE_SUPPORTS_ACTION);
    } else {
      this->clear_feature_flags(CLIMATE_SUPPORTS_ACTION);
    }
  }

  void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
  void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_auto_mode(bool supports_auto_mode) { set_mode_support_(CLIMATE_MODE_AUTO, supports_auto_mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_cool_mode(bool supports_cool_mode) { set_mode_support_(CLIMATE_MODE_COOL, supports_cool_mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_heat_mode(bool supports_heat_mode) { set_mode_support_(CLIMATE_MODE_HEAT, supports_heat_mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_heat_cool_mode(bool supported) { set_mode_support_(CLIMATE_MODE_HEAT_COOL, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_fan_only_mode(bool supports_fan_only_mode) {
    set_mode_support_(CLIMATE_MODE_FAN_ONLY, supports_fan_only_mode);
  }
  ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
  void set_supports_dry_mode(bool supports_dry_mode) { set_mode_support_(CLIMATE_MODE_DRY, supports_dry_mode); }
  bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
  const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }

  void set_supports_action(bool supports_action) { this->supports_action_ = supports_action; }
  bool get_supports_action() const { return this->supports_action_; }

  void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
  void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
  void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_on(bool supported) { set_fan_mode_support_(CLIMATE_FAN_ON, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_off(bool supported) { set_fan_mode_support_(CLIMATE_FAN_OFF, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_auto(bool supported) { set_fan_mode_support_(CLIMATE_FAN_AUTO, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_low(bool supported) { set_fan_mode_support_(CLIMATE_FAN_LOW, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_medium(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MEDIUM, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_high(bool supported) { set_fan_mode_support_(CLIMATE_FAN_HIGH, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_middle(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MIDDLE, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_focus(bool supported) { set_fan_mode_support_(CLIMATE_FAN_FOCUS, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20")
  void set_supports_fan_mode_diffuse(bool supported) { set_fan_mode_support_(CLIMATE_FAN_DIFFUSE, supported); }
  bool supports_fan_mode(ClimateFanMode fan_mode) const { return this->supported_fan_modes_.count(fan_mode); }
  bool get_supports_fan_modes() const {
    return !this->supported_fan_modes_.empty() || !this->supported_custom_fan_modes_.empty();
@@ -137,16 +146,6 @@ class ClimateTraits {

  void set_supported_swing_modes(std::set<ClimateSwingMode> modes) { this->supported_swing_modes_ = std::move(modes); }
  void add_supported_swing_mode(ClimateSwingMode mode) { this->supported_swing_modes_.insert(mode); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_off(bool supported) { set_swing_mode_support_(CLIMATE_SWING_OFF, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_both(bool supported) { set_swing_mode_support_(CLIMATE_SWING_BOTH, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_vertical(bool supported) { set_swing_mode_support_(CLIMATE_SWING_VERTICAL, supported); }
  ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20")
  void set_supports_swing_mode_horizontal(bool supported) {
    set_swing_mode_support_(CLIMATE_SWING_HORIZONTAL, supported);
  }
  bool supports_swing_mode(ClimateSwingMode swing_mode) const { return this->supported_swing_modes_.count(swing_mode); }
  bool get_supports_swing_modes() const { return !this->supported_swing_modes_.empty(); }
  const std::set<ClimateSwingMode> &get_supported_swing_modes() const { return this->supported_swing_modes_; }
@@ -219,24 +218,20 @@ class ClimateTraits {
    }
  }

  bool supports_current_temperature_{false};
  bool supports_current_humidity_{false};
  bool supports_two_point_target_temperature_{false};
  bool supports_target_humidity_{false};
  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
  bool supports_action_{false};
  std::set<climate::ClimateFanMode> supported_fan_modes_;
  std::set<climate::ClimateSwingMode> supported_swing_modes_;
  std::set<climate::ClimatePreset> supported_presets_;
  std::set<std::string> supported_custom_fan_modes_;
  std::set<std::string> supported_custom_presets_;

  uint32_t feature_flags_{0};
  float visual_min_temperature_{10};
  float visual_max_temperature_{30};
  float visual_target_temperature_step_{0.1};
  float visual_current_temperature_step_{0.1};
  float visual_min_humidity_{30};
  float visual_max_humidity_{99};

  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
  std::set<climate::ClimateFanMode> supported_fan_modes_;
  std::set<climate::ClimateSwingMode> supported_swing_modes_;
  std::set<climate::ClimatePreset> supported_presets_;
  std::set<std::string> supported_custom_fan_modes_;
  std::set<std::string> supported_custom_presets_;
};

} // namespace climate

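Note that has_feature_flags() as defined above masks with & rather than comparing for equality, so passing several OR-ed bits is an "any of these is set" test; that is why the climate.cpp changes check CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE as a single condition. A hedged migration sketch for component code (illustrative only, not part of this diff; the old setters remain available but are marked deprecated by the ESPDEPRECATED annotations above):

// Before (deprecated setter):
traits.set_supports_current_temperature(true);

// After (equivalent, using the new bit flags):
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);

// "Any of" check: true if at least one of the OR-ed bits is set.
bool two_point = traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                                          climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);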
@@ -13,7 +13,7 @@ static const uint8_t C_M1106_CMD_SET_CO2_CALIB_RESPONSE[4] = {0x16, 0x01, 0x03,

uint8_t cm1106_checksum(const uint8_t *response, size_t len) {
  uint8_t crc = 0;
  for (int i = 0; i < len - 1; i++) {
  for (size_t i = 0; i < len - 1; i++) {
    crc -= response[i];
  }
  return crc;

@@ -11,7 +11,7 @@ void CopyLock::setup() {

  traits.set_assumed_state(source_->traits.get_assumed_state());
  traits.set_requires_code(source_->traits.get_requires_code());
  traits.set_supported_states(source_->traits.get_supported_states());
  traits.set_supported_states_mask(source_->traits.get_supported_states_mask());
  traits.set_supports_open(source_->traits.get_supports_open());

  this->publish_state(source_->state);

@@ -1,5 +1,6 @@
#include "cover.h"
#include "esphome/core/log.h"
#include <strings.h>

namespace esphome {
namespace cover {

@@ -26,7 +26,7 @@ void DaikinArcClimate::transmit_query_() {
  uint8_t remote_header[8] = {0x11, 0xDA, 0x27, 0x00, 0x84, 0x87, 0x20, 0x00};

  // Calculate checksum
  for (int i = 0; i < sizeof(remote_header) - 1; i++) {
  for (size_t i = 0; i < sizeof(remote_header) - 1; i++) {
    remote_header[sizeof(remote_header) - 1] += remote_header[i];
  }

@@ -102,7 +102,7 @@ void DaikinArcClimate::transmit_state() {
  remote_state[9] = fan_speed & 0xff;

  // Calculate checksum
  for (int i = 0; i < sizeof(remote_header) - 1; i++) {
  for (size_t i = 0; i < sizeof(remote_header) - 1; i++) {
    remote_header[sizeof(remote_header) - 1] += remote_header[i];
  }

@@ -350,7 +350,7 @@ bool DaikinArcClimate::on_receive(remote_base::RemoteReceiveData data) {
  bool valid_daikin_frame = false;
  if (data.expect_item(DAIKIN_HEADER_MARK, DAIKIN_HEADER_SPACE)) {
    valid_daikin_frame = true;
    int bytes_count = data.size() / 2 / 8;
    size_t bytes_count = data.size() / 2 / 8;
    std::unique_ptr<char[]> buf(new char[bytes_count * 3 + 1]);
    buf[0] = '\0';
    for (size_t i = 0; i < bytes_count; i++) {
@@ -370,7 +370,7 @@ bool DaikinArcClimate::on_receive(remote_base::RemoteReceiveData data) {
  if (!valid_daikin_frame) {
    char sbuf[16 * 10 + 1];
    sbuf[0] = '\0';
    for (size_t j = 0; j < data.size(); j++) {
    for (size_t j = 0; j < static_cast<size_t>(data.size()); j++) {
      if ((j - 2) % 16 == 0) {
        if (j > 0) {
          ESP_LOGD(TAG, "DATA %04x: %s", (j - 16 > 0xffff ? 0 : j - 16), sbuf);
@@ -380,19 +380,26 @@ bool DaikinArcClimate::on_receive(remote_base::RemoteReceiveData data) {
      char type_ch = ' ';
      // debug_tolerance = 25%

      if (DAIKIN_DBG_LOWER(DAIKIN_ARC_PRE_MARK) <= data[j] && data[j] <= DAIKIN_DBG_UPPER(DAIKIN_ARC_PRE_MARK))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_ARC_PRE_MARK)) <= data[j] &&
          data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_ARC_PRE_MARK)))
        type_ch = 'P';
      if (DAIKIN_DBG_LOWER(DAIKIN_ARC_PRE_SPACE) <= -data[j] && -data[j] <= DAIKIN_DBG_UPPER(DAIKIN_ARC_PRE_SPACE))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_ARC_PRE_SPACE)) <= -data[j] &&
          -data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_ARC_PRE_SPACE)))
        type_ch = 'a';
      if (DAIKIN_DBG_LOWER(DAIKIN_HEADER_MARK) <= data[j] && data[j] <= DAIKIN_DBG_UPPER(DAIKIN_HEADER_MARK))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_HEADER_MARK)) <= data[j] &&
          data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_HEADER_MARK)))
        type_ch = 'H';
      if (DAIKIN_DBG_LOWER(DAIKIN_HEADER_SPACE) <= -data[j] && -data[j] <= DAIKIN_DBG_UPPER(DAIKIN_HEADER_SPACE))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_HEADER_SPACE)) <= -data[j] &&
          -data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_HEADER_SPACE)))
        type_ch = 'h';
      if (DAIKIN_DBG_LOWER(DAIKIN_BIT_MARK) <= data[j] && data[j] <= DAIKIN_DBG_UPPER(DAIKIN_BIT_MARK))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_BIT_MARK)) <= data[j] &&
          data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_BIT_MARK)))
        type_ch = 'B';
      if (DAIKIN_DBG_LOWER(DAIKIN_ONE_SPACE) <= -data[j] && -data[j] <= DAIKIN_DBG_UPPER(DAIKIN_ONE_SPACE))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_ONE_SPACE)) <= -data[j] &&
          -data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_ONE_SPACE)))
        type_ch = '1';
      if (DAIKIN_DBG_LOWER(DAIKIN_ZERO_SPACE) <= -data[j] && -data[j] <= DAIKIN_DBG_UPPER(DAIKIN_ZERO_SPACE))
      if (static_cast<int32_t>(DAIKIN_DBG_LOWER(DAIKIN_ZERO_SPACE)) <= -data[j] &&
          -data[j] <= static_cast<int32_t>(DAIKIN_DBG_UPPER(DAIKIN_ZERO_SPACE)))
        type_ch = '0';

      if (abs(data[j]) > 100000) {
@@ -400,7 +407,7 @@ bool DaikinArcClimate::on_receive(remote_base::RemoteReceiveData data) {
      } else {
        sprintf(sbuf, "%s%-5d[%c] ", sbuf, (int) (round(data[j] / 10.) * 10), type_ch);
      }
      if (j == data.size() - 1) {
      if (j + 1 == static_cast<size_t>(data.size())) {
        ESP_LOGD(TAG, "DATA %04x: %s", (j - 8 > 0xffff ? 0 : j - 8), sbuf);
      }
    }

@@ -5,7 +5,7 @@ namespace dashboard_import {

static std::string g_package_import_url;  // NOLINT

std::string get_package_import_url() { return g_package_import_url; }
const std::string &get_package_import_url() { return g_package_import_url; }
void set_package_import_url(std::string url) { g_package_import_url = std::move(url); }

} // namespace dashboard_import

@@ -5,7 +5,7 @@
namespace esphome {
namespace dashboard_import {

std::string get_package_import_url();
const std::string &get_package_import_url();
void set_package_import_url(std::string url);

} // namespace dashboard_import

Some files were not shown because too many files have changed in this diff.