Mirror of https://github.com/esphome/esphome.git, synced 2025-08-10 20:29:24 +00:00.

Compare commits (1414 commits)
Author | SHA1 | Date
---|---|---
*1414 commit entries, 431d3578a5 through 70def85ba1; only the SHA1 column is populated in this listing (author, date, and message fields are empty).*
.clang-format (new file, 137 lines)

```yaml
Language: Cpp
AccessModifierOffset: -1
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlines: DontAlign
AlignOperands: true
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: All
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: MultiLine
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
  AfterClass: false
  AfterControlStatement: false
  AfterEnum: false
  AfterFunction: false
  AfterNamespace: false
  AfterObjCDeclaration: false
  AfterStruct: false
  AfterUnion: false
  AfterExternBlock: false
  BeforeCatch: false
  BeforeElse: false
  IndentBraces: false
  SplitEmptyFunction: true
  SplitEmptyRecord: true
  SplitEmptyNamespace: true
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Attach
BreakBeforeInheritanceComma: false
BreakInheritanceList: BeforeColon
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BreakConstructorInitializers: BeforeColon
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 120
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: true
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
  - foreach
  - Q_FOREACH
  - BOOST_FOREACH
IncludeBlocks: Preserve
IncludeCategories:
  - Regex: '^<ext/.*\.h>'
    Priority: 2
  - Regex: '^<.*\.h>'
    Priority: 1
  - Regex: '^<.*'
    Priority: 2
  - Regex: '.*'
    Priority: 3
IncludeIsMainRegex: '([-_](test|unittest))?$'
IndentCaseLabels: true
IndentPPDirectives: None
IndentWidth: 2
IndentWrappedFunctionNames: false
KeepEmptyLinesAtTheStartOfBlocks: false
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 1
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 2000
PointerAlignment: Right
RawStringFormats:
  - Language: Cpp
    Delimiters:
      - cc
      - CC
      - cpp
      - Cpp
      - CPP
      - 'c++'
      - 'C++'
    CanonicalDelimiter: ''
    BasedOnStyle: google
  - Language: TextProto
    Delimiters:
      - pb
      - PB
      - proto
      - PROTO
    EnclosingFunctions:
      - EqualsProto
      - EquivToProto
      - PARSE_PARTIAL_TEXT_PROTO
      - PARSE_TEST_PROTO
      - PARSE_TEXT_PROTO
      - ParseTextOrDie
      - ParseTextProtoOrDie
    CanonicalDelimiter: ''
    BasedOnStyle: google
ReflowComments: true
SortIncludes: false
SortUsingDeclarations: false
SpaceAfterCStyleCast: true
SpaceAfterTemplateKeyword: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeParens: ControlStatements
SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInContainerLiterals: false
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Auto
TabWidth: 2
UseTab: Never
```
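To make the effect of these options concrete, here is a small, hypothetical C++ sketch (not a file from this repository; the class and member names are invented) formatted the way the settings above describe: 2-space indentation, attached braces, right-bound pointers, a 120-column limit, and two spaces before trailing comments.

```cpp
// Illustrative example of the style encoded above (IndentWidth: 2,
// BreakBeforeBraces: Attach, PointerAlignment: Right, ColumnLimit: 120,
// SpacesBeforeTrailingComments: 2, FixNamespaceComments: true).
#include <string>
#include <utility>

namespace example {

class DemoSensor {
 public:
  explicit DemoSensor(std::string name) : name_(std::move(name)) {}

  // Short functions may collapse onto one line (AllowShortFunctionsOnASingleLine: All).
  const std::string &get_name() const { return this->name_; }

  void update(float *value) {  // the '*' binds to the name, not the type (PointerAlignment: Right)
    if (value != nullptr) {
      this->state_ = *value;
    }
  }

 protected:
  std::string name_;
  float state_{0.0f};  // two spaces before this trailing comment
};

}  // namespace example
```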
.clang-tidy (new file, 127 lines)

```yaml
---
Checks: >-
  *,
  -abseil-*,
  -android-*,
  -boost-*,
  -bugprone-macro-parentheses,
  -cert-dcl50-cpp,
  -cert-err58-cpp,
  -clang-analyzer-core.CallAndMessage,
  -clang-analyzer-osx.*,
  -clang-analyzer-security.*,
  -cppcoreguidelines-avoid-goto,
  -cppcoreguidelines-c-copy-assignment-signature,
  -cppcoreguidelines-owning-memory,
  -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
  -cppcoreguidelines-pro-bounds-constant-array-index,
  -cppcoreguidelines-pro-bounds-pointer-arithmetic,
  -cppcoreguidelines-pro-type-const-cast,
  -cppcoreguidelines-pro-type-cstyle-cast,
  -cppcoreguidelines-pro-type-member-init,
  -cppcoreguidelines-pro-type-reinterpret-cast,
  -cppcoreguidelines-pro-type-static-cast-downcast,
  -cppcoreguidelines-pro-type-union-access,
  -cppcoreguidelines-pro-type-vararg,
  -cppcoreguidelines-special-member-functions,
  -fuchsia-*,
  -fuchsia-default-arguments,
  -fuchsia-multiple-inheritance,
  -fuchsia-overloaded-operator,
  -fuchsia-statically-constructed-objects,
  -google-build-using-namespace,
  -google-explicit-constructor,
  -google-readability-braces-around-statements,
  -google-readability-casting,
  -google-readability-todo,
  -google-runtime-int,
  -google-runtime-references,
  -hicpp-*,
  -llvm-header-guard,
  -llvm-include-order,
  -misc-unconventional-assign-operator,
  -misc-unused-parameters,
  -modernize-deprecated-headers,
  -modernize-pass-by-value,
  -modernize-pass-by-value,
  -modernize-return-braced-init-list,
  -modernize-use-auto,
  -modernize-use-default-member-init,
  -modernize-use-equals-default,
  -mpi-*,
  -objc-*,
  -performance-unnecessary-value-param,
  -readability-braces-around-statements,
  -readability-else-after-return,
  -readability-implicit-bool-conversion,
  -readability-named-parameter,
  -readability-redundant-member-init,
  -warnings-as-errors,
  -zircon-*
WarningsAsErrors: '*'
HeaderFilterRegex: '^.*/src/esphome/.*'
AnalyzeTemporaryDtors: false
FormatStyle: google
CheckOptions:
  - key: google-readability-braces-around-statements.ShortStatementLines
    value: '1'
  - key: google-readability-function-size.StatementThreshold
    value: '800'
  - key: google-readability-namespace-comments.ShortNamespaceLines
    value: '10'
  - key: google-readability-namespace-comments.SpacesBeforeComments
    value: '2'
  - key: modernize-loop-convert.MaxCopySize
    value: '16'
  - key: modernize-loop-convert.MinConfidence
    value: reasonable
  - key: modernize-loop-convert.NamingStyle
    value: CamelCase
  - key: modernize-pass-by-value.IncludeStyle
    value: llvm
  - key: modernize-replace-auto-ptr.IncludeStyle
    value: llvm
  - key: modernize-use-nullptr.NullMacros
    value: 'NULL'
  - key: readability-identifier-naming.LocalVariableCase
    value: 'lower_case'
  - key: readability-identifier-naming.ClassCase
    value: 'CamelCase'
  - key: readability-identifier-naming.StructCase
    value: 'CamelCase'
  - key: readability-identifier-naming.EnumCase
    value: 'CamelCase'
  - key: readability-identifier-naming.EnumConstantCase
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.StaticConstantCase
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.StaticVariableCase
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.GlobalConstantCase
    value: 'UPPER_CASE'
  - key: readability-identifier-naming.ParameterCase
    value: 'lower_case'
  - key: readability-identifier-naming.PrivateMemberPrefix
    value: 'NO_PRIVATE_MEMBERS_ALWAYS_USE_PROTECTED'
  - key: readability-identifier-naming.PrivateMethodPrefix
    value: 'NO_PRIVATE_METHODS_ALWAYS_USE_PROTECTED'
  - key: readability-identifier-naming.ClassMemberCase
    value: 'lower_case'
  - key: readability-identifier-naming.ClassMemberCase
    value: 'lower_case'
  - key: readability-identifier-naming.ProtectedMemberCase
    value: 'lower_case'
  - key: readability-identifier-naming.ProtectedMemberSuffix
    value: '_'
  - key: readability-identifier-naming.FunctionCase
    value: 'lower_case'
  - key: readability-identifier-naming.ClassMethodCase
    value: 'lower_case'
  - key: readability-identifier-naming.ProtectedMethodCase
    value: 'lower_case'
  - key: readability-identifier-naming.ProtectedMethodSuffix
    value: '_'
  - key: readability-identifier-naming.VirtualMethodCase
    value: 'lower_case'
  - key: readability-identifier-naming.VirtualMethodSuffix
    value: ''
```
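The readability-identifier-naming options above encode the project's naming scheme. The following hypothetical C++ sketch (names invented for illustration, not taken from the codebase) shows identifiers that satisfy those checks.

```cpp
// Illustrative identifiers that satisfy the readability-identifier-naming rules:
// CamelCase types, UPPER_CASE constants and enum values, lower_case functions,
// parameters and members, and a trailing '_' on protected members and methods.
#include <cstdint>

static const uint32_t MAX_RETRIES = 3;  // StaticConstantCase: UPPER_CASE

enum class FanSpeed : uint8_t {  // EnumCase: CamelCase
  LOW,                           // EnumConstantCase: UPPER_CASE
  HIGH,
};

class FanController {  // ClassCase: CamelCase
 public:
  void set_speed(FanSpeed speed) {  // ClassMethodCase / ParameterCase: lower_case
    this->speed_ = speed;
    this->apply_speed_();
  }

 protected:
  // The PrivateMemberPrefix / PrivateMethodPrefix sentinels effectively forbid
  // `private:` sections; members stay protected and carry a trailing underscore.
  void apply_speed_() {}
  FanSpeed speed_{FanSpeed::LOW};  // ProtectedMemberSuffix: '_'
};
```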
.coveragerc (new file, 2 lines)

```ini
[run]
omit = esphome/components/*
```
.devcontainer/devcontainer.json (new file, 32 lines)

```json
{
  "name": "ESPHome Dev",
  "context": "..",
  "dockerFile": "../docker/Dockerfile.dev",
  "postCreateCommand": "mkdir -p config && pip3 install -e .",
  "runArgs": ["--privileged", "-e", "ESPHOME_DASHBOARD_USE_PING=1"],
  "appPort": 6052,
  "extensions": [
    "ms-python.python",
    "visualstudioexptteam.vscodeintellicode",
    "redhat.vscode-yaml"
  ],
  "settings": {
    "python.pythonPath": "/usr/local/bin/python",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true,
    "terminal.integrated.shell.linux": "/bin/bash",
    "yaml.customTags": [
      "!secret scalar",
      "!lambda scalar",
      "!include_dir_named scalar",
      "!include_dir_list scalar",
      "!include_dir_merge_list scalar",
      "!include_dir_merge_named scalar"
    ]
  }
}
```
.editorconfig (new file, 28 lines)

```ini
root = true

# general
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8

# python
[*.{py}]
indent_style = space
indent_size = 4

# C++
[*.{cpp,h,tcc}]
indent_style = space
indent_size = 2

# Web
[*.{js,html,css}]
indent_style = space
indent_size = 2

# YAML
[*.{yaml,yml}]
indent_style = space
indent_size = 2
quote_type = single
```
.github/FUNDING.yml (new file, 8 lines)

```yaml
# These are supported funding model platforms

github:
patreon: ottowinter
open_collective:
ko_fi:
tidelift:
custom: https://esphome.io/guides/supporters.html
```
.github/ISSUE_TEMPLATE/bug_report.md (deleted, 47 lines)

````markdown
---
name: Bug report
about: Create a report to help us improve

---

<!-- Thanks for reporting a bug for this project. READ THIS FIRST:
- Please make sure to submit issues in the right GitHub repository, if unsure just post it here:
  - esphomeyaml [here] - This is mostly for reporting bugs when compiling and when you get a long stack trace while compiling or if a configuration fails to validate.
  - esphomelib [https://github.com/OttoWinter/esphomelib] - Report bugs there if the ESP is crashing or a feature is not working as expected.
  - esphomedocs [https://github.com/OttoWinter/esphomedocs] - Report bugs there if the documentation is wrong/outdated.
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks (```). Do not delete any text from this template!
-->

**Operating environment (Hass.io/Docker/pip/etc.):**
<!--
Please provide details about your environment.
-->

**ESP (ESP32/ESP8266/Board/Sonoff):**
<!--
Please provide details about which ESP you're using.
-->

**Affected component:**
<!--
Please add the link to the documentation at https://esphomelib.com/esphomeyaml/index.html of the component in question.
-->


**Description of problem:**


**Problem-relevant YAML-configuration entries:**
```yaml

```

**Traceback (if applicable):**
<!--
Please copy the traceback here if compilation is failing. If possible, also connect to the ESP and copy its logs into the backticks.
-->
```

```

**Additional information:**
````
.github/ISSUE_TEMPLATE/config.yml (new file, 12 lines)

```yaml
blank_issues_enabled: false
contact_links:
  - name: Issue Tracker
    url: https://github.com/esphome/issues
    about: Please create bug reports in the dedicated issue tracker.
  - name: Feature Request Tracker
    url: https://github.com/esphome/feature-requests
    about: Please create feature requests in the dedicated feature request tracker.
  - name: Frequently Asked Question
    url: https://esphome.io/guides/faq.html
    about: Please view the FAQ for common questions and what to include in a bug report.
```
.github/ISSUE_TEMPLATE/feature_request.md (deleted, 22 lines)

```markdown
---
name: Feature request
about: Suggest an idea for this project

---

<!-- READ THIS FIRST:
- This is for feature requests only, if you want to have a certain new sensor/module supported, please use the "new integration" template.
- Please be as descriptive as possible, especially use-cases that can otherwise not be solved boost the problem's priority.
-->

**Is your feature request related to a problem? Please describe.**
<!--
A clear and concise description of what the problem is.
-->
Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A description of what you want to happen.

**Additional context**
Add any other context about the feature request here.
```
.github/ISSUE_TEMPLATE/new-integration.md (deleted, 20 lines)

```markdown
---
name: New integration
about: Suggest a new integration for esphomelib

---

<!-- READ THIS FIRST:
- This is for new integrations (such as new sensors/modules) only, for new features within the environment please use the "feature request" template.
- Do not delete anything from this template and fill out the form as precisely as possible.
-->

**What new integration would you wish to have?**
<!-- A name/description of the new integration/board. -->

**If possible, provide a link to an existing library for the integration:**

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Additional context**
```
.github/PULL_REQUEST_TEMPLATE.md (modified, hunk @@ -3,18 +3,11 @@)

````markdown

**Related issue (if applicable):** fixes <link to issue>

**Pull request in [esphomedocs](https://github.com/OttoWinter/esphomedocs) with documentation (if applicable):** OttoWinter/esphomedocs#<esphomedocs PR number goes here>
**Pull request in [esphomelib](https://github.com/OttoWinter/esphomelib) with C++ framework changes (if applicable):** OttoWinter/esphomelib#<esphomelib PR number goes here>

## Example entry for YAML configuration (if applicable):
```yaml

```
**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** esphome/esphome-docs#<esphome-docs PR number goes here>

## Checklist:
- [ ] The code change is tested and works locally.
- [ ] Tests have been added to verify that the new code works (under `tests/` folder).
- [ ] Check this box if you have read, understand, comply, and agree with the [Code of Conduct](https://github.com/OttoWinter/esphomeyaml/blob/master/CODE_OF_CONDUCT.md).

If user exposed functionality or configuration variables are added/changed:
- [ ] Documentation added/updated in [esphomedocs](https://github.com/OttoWinter/esphomedocs).
- [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).
````
.github/dependabot.yml (new file, 9 lines)

```yaml
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    ignore:
      # Hypothesis is only used for testing and is updated quite often
      - dependency-name: hypothesis
```
.github/issue-close-app.yml (new file, 7 lines)

```yaml
comment: >-
  https://github.com/esphome/esphome/issues/430
issueConfigs:
  - content:
      - "OTHERWISE THE ISSUE WILL BE CLOSED AUTOMATICALLY"

caseInsensitive: false
```
.github/lock.yml (new file, 36 lines)

```yaml
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 7

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: false

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels:
  - keep-open

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
# only: issues

# Optionally, specify configuration settings just for `issues` or `pulls`
# issues:
#   exemptLabels:
#     - help-wanted
#   lockLabel: outdated

# pulls:
#   daysUntilLock: 30

# Repository to extend settings from
# _extends: repo
```
.github/stale.yml (new file, 59 lines)

```yaml
# Configuration for probot-stale - https://github.com/probot/stale

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 60

# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: 7

# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels: []

# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
  - not-stale

# Set to true to ignore issues in a project (defaults to false)
exemptProjects: false

# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: true

# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false

# Label to use when marking as stale
staleLabel: stale

# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.

# Comment to post when removing the stale label.
# unmarkComment: >
#   Your comment here.

# Comment to post when closing a stale Issue or Pull Request.
# closeComment: >
#   Your comment here.

# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 10

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
# pulls:
#   daysUntilStale: 30
#   markComment: >
#     This pull request has been automatically marked as stale because it has not had
#     recent activity. It will be closed if no further activity occurs. Thank you
#     for your contributions.

# issues:
#   exemptLabels:
#     - confirmed
```
.github/workflows/ci-docker.yml (vendored, new file, 55 lines)
@@ -0,0 +1,55 @@
name: CI for docker images

# Only run when docker paths change
on:
  push:
    branches: [dev, beta, master]
    paths:
      - 'docker/**'
      - '.github/workflows/**'

  pull_request:
    paths:
      - 'docker/**'
      - '.github/workflows/**'

jobs:
  check-docker:
    name: Build docker containers
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        arch: [amd64, armv7, aarch64]
        build_type: ["hassio", "docker"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up env variables
        run: |
          base_version="2.6.0"

          if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
            build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
            build_to="esphome/esphome-hassio-${{ matrix.arch }}"
            dockerfile="docker/Dockerfile.hassio"
          else
            build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
            build_to="esphome/esphome-${{ matrix.arch }}"
            dockerfile="docker/Dockerfile"
          fi

          echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
          echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
          echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
      - name: Pull for cache
        run: |
          docker pull "${BUILD_TO}:dev" || true
      - name: Register QEMU binfmt
        run: docker run --rm --privileged multiarch/qemu-user-static:5.0.0-2 --reset -p yes
      - run: |
          docker build \
            --build-arg "BUILD_FROM=${BUILD_FROM}" \
            --build-arg "BUILD_VERSION=ci" \
            --cache-from "${BUILD_TO}:dev" \
            --file "${DOCKERFILE}" \
            .
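The matrix above fans out into six build jobs (three architectures times two build types), and the shell step maps each combination to a base image, a target image, and a Dockerfile. A rough Python sketch of the same mapping, assuming nothing beyond what the workflow itself encodes:

from itertools import product

archs = ["amd64", "armv7", "aarch64"]
build_types = ["hassio", "docker"]
base_version = "2.6.0"

# Mirrors the bash step in ci-docker.yml: pick base image, target image and Dockerfile per combination
for arch, build_type in product(archs, build_types):
    if build_type == "hassio":
        build_from = f"esphome/esphome-hassio-base-{arch}:{base_version}"
        build_to = f"esphome/esphome-hassio-{arch}"
        dockerfile = "docker/Dockerfile.hassio"
    else:
        build_from = f"esphome/esphome-base-{arch}:{base_version}"
        build_to = f"esphome/esphome-{arch}"
        dockerfile = "docker/Dockerfile"
    print(f"{build_to} built from {build_from} using {dockerfile}")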
.github/workflows/ci.yml (vendored, new file, 178 lines)
@@ -0,0 +1,178 @@
|
||||
# THESE JOBS ARE COPIED IN release.yml and release-dev.yml
|
||||
# PLEASE ALSO UPDATE THOSE FILES WHEN CHANGING LINES HERE
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
# On dev branch release-dev already performs CI checks
|
||||
# On other branches the `pull_request` trigger will be used
|
||||
branches: [beta, master]
|
||||
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
lint-clang-format:
|
||||
runs-on: ubuntu-latest
|
||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
||||
# doesn't have to be installed
|
||||
container: esphome/esphome-lint:latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Cache platformio intermediary files (like libraries etc)
|
||||
# Note: platformio platform versions should be cached via the esphome-lint image
|
||||
- name: Cache Platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: .pio
|
||||
key: lint-cpp-pio-${{ hashFiles('platformio.ini') }}
|
||||
restore-keys: |
|
||||
lint-cpp-pio-
|
||||
# Set up the pio project so that the cpp checks know how files are compiled
|
||||
# (build flags, libraries etc)
|
||||
- name: Set up platformio environment
|
||||
run: pio init --ide atom
|
||||
|
||||
- name: Run clang-format
|
||||
run: script/clang-format -i
|
||||
- name: Suggest changes
|
||||
run: script/ci-suggest-changes
|
||||
|
||||
lint-clang-tidy:
|
||||
runs-on: ubuntu-latest
|
||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
||||
# doesn't have to be installed
|
||||
container: esphome/esphome-lint:latest
|
||||
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
split: [1, 2, 3, 4]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Cache platformio intermediary files (like libraries etc)
|
||||
# Note: platformio platform versions should be cached via the esphome-lint image
|
||||
- name: Cache Platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: .pio
|
||||
key: lint-cpp-pio-${{ hashFiles('platformio.ini') }}
|
||||
restore-keys: |
|
||||
lint-cpp-pio-
|
||||
# Set up the pio project so that the cpp checks know how files are compiled
|
||||
# (build flags, libraries etc)
|
||||
- name: Set up platformio environment
|
||||
run: pio init --ide atom
|
||||
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
- name: Run clang-tidy
|
||||
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
|
||||
- name: Suggest changes
|
||||
run: script/ci-suggest-changes
|
||||
|
||||
lint-python:
|
||||
# Don't use the esphome-lint docker image because it may contain outdated requirements.
|
||||
# This way, all dependencies are cached via the cache action.
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
- name: Set up python environment
|
||||
run: script/setup
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- name: Lint Custom
|
||||
run: script/ci-custom.py
|
||||
- name: Lint Python
|
||||
run: script/lint-python
|
||||
- name: Lint CODEOWNERS
|
||||
run: script/build_codeowners.py --check
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- test1
|
||||
- test2
|
||||
- test3
|
||||
- test4
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
# Use per test platformio cache because tests have different platform versions
|
||||
- name: Cache ~/.platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core_config.py') }}
|
||||
restore-keys: |
|
||||
test-home-platformio-${{ matrix.test }}-
|
||||
- name: Set up environment
|
||||
run: script/setup
|
||||
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- run: esphome tests/${{ matrix.test }}.yaml compile
|
||||
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
- name: Set up environment
|
||||
run: script/setup
|
||||
- name: Install Github Actions annotator
|
||||
run: pip install pytest-github-actions-annotate-failures
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- name: Run pytest
|
||||
run: |
|
||||
pytest \
|
||||
-qq \
|
||||
--durations=10 \
|
||||
-o console_output_style=count \
|
||||
tests
|
.github/workflows/matchers/ci-custom.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "ci-custom",
      "pattern": [
        {
          "regexp": "^ERROR (.*):(\\d+):(\\d+) - (.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/clang-tidy.json (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "problemMatcher": [
    {
      "owner": "clang-tidy",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s+(error):\\s+(.*) \\[([a-z0-9,\\-]+)\\]\\s*$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "message": 5
        }
      ]
    }
  ]
}
.github/workflows/matchers/gcc.json (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "problemMatcher": [
    {
      "owner": "gcc",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "message": 5
        }
      ]
    }
  ]
}
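The matcher's capture groups line a compiler diagnostic up with file, line, column, severity, and message. A small Python check of the same expression, purely illustrative (the sample diagnostic line below is invented, not taken from a real build):

import re

# Same expression as the "regexp" entry in gcc.json above
GCC_DIAGNOSTIC = re.compile(r"^(.*):(\d+):(\d+):\s+(?:fatal\s+)?(warning|error):\s+(.*)$")

# Made-up diagnostic line, only to show how the groups map
sample = "esphome/core/component.cpp:42:10: error: 'foo' was not declared in this scope"
match = GCC_DIAGNOSTIC.match(sample)
if match:
    # Groups 1-5 correspond to file, line, column, severity, message in the matcher
    file, line, column, severity, message = match.groups()
    print(file, line, column, severity, message)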
.github/workflows/matchers/lint-python.json (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
{
  "problemMatcher": [
    {
      "owner": "flake8",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+) - ([EFCDNW]\\d{3}.*)$",
          "file": 1,
          "line": 2,
          "message": 3
        }
      ]
    },
    {
      "owner": "pylint",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+) - (\\[[EFCRW]\\d{4}\\(.*\\),.*\\].*)$",
          "file": 1,
          "line": 2,
          "message": 3
        }
      ]
    }
  ]
}
.github/workflows/matchers/python.json (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/release-dev.yml (vendored, new file, 264 lines)
@@ -0,0 +1,264 @@
|
||||
name: Publish dev releases to docker hub
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- dev
|
||||
|
||||
jobs:
|
||||
# THE LINT/TEST JOBS ARE COPIED FROM ci.yaml
|
||||
|
||||
lint-clang-format:
|
||||
runs-on: ubuntu-latest
|
||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
||||
# doesn't have to be installed
|
||||
container: esphome/esphome-lint:latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Cache platformio intermediary files (like libraries etc)
|
||||
# Note: platformio platform versions should be cached via the esphome-lint image
|
||||
- name: Cache Platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: .pio
|
||||
key: lint-cpp-pio-${{ hashFiles('platformio.ini') }}
|
||||
restore-keys: |
|
||||
lint-cpp-pio-
|
||||
# Set up the pio project so that the cpp checks know how files are compiled
|
||||
# (build flags, libraries etc)
|
||||
- name: Set up platformio environment
|
||||
run: pio init --ide atom
|
||||
|
||||
- name: Run clang-format
|
||||
run: script/clang-format -i
|
||||
- name: Suggest changes
|
||||
run: script/ci-suggest-changes
|
||||
|
||||
lint-clang-tidy:
|
||||
runs-on: ubuntu-latest
|
||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
||||
# doesn't have to be installed
|
||||
container: esphome/esphome-lint:latest
|
||||
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
split: [1, 2, 3, 4]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Cache platformio intermediary files (like libraries etc)
|
||||
# Note: platformio platform versions should be cached via the esphome-lint image
|
||||
- name: Cache Platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: .pio
|
||||
key: lint-cpp-pio-${{ hashFiles('platformio.ini') }}
|
||||
restore-keys: |
|
||||
lint-cpp-pio-
|
||||
# Set up the pio project so that the cpp checks know how files are compiled
|
||||
# (build flags, libraries etc)
|
||||
- name: Set up platformio environment
|
||||
run: pio init --ide atom
|
||||
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
- name: Run clang-tidy
|
||||
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
|
||||
- name: Suggest changes
|
||||
run: script/ci-suggest-changes
|
||||
|
||||
lint-python:
|
||||
# Don't use the esphome-lint docker image because it may contain outdated requirements.
|
||||
# This way, all dependencies are cached via the cache action.
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
- name: Set up python environment
|
||||
run: script/setup
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- name: Lint Custom
|
||||
run: script/ci-custom.py
|
||||
- name: Lint Python
|
||||
run: script/lint-python
|
||||
- name: Lint CODEOWNERS
|
||||
run: script/build_codeowners.py --check
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- test1
|
||||
- test2
|
||||
- test3
|
||||
- test4
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
# Use per test platformio cache because tests have different platform versions
|
||||
- name: Cache ~/.platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core_config.py') }}
|
||||
restore-keys: |
|
||||
test-home-platformio-${{ matrix.test }}-
|
||||
- name: Set up environment
|
||||
run: script/setup
|
||||
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- run: esphome tests/${{ matrix.test }}.yaml compile
|
||||
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
- name: Set up environment
|
||||
run: script/setup
|
||||
- name: Install Github Actions annotator
|
||||
run: pip install pytest-github-actions-annotate-failures
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- name: Run pytest
|
||||
run: |
|
||||
pytest \
|
||||
-qq \
|
||||
--durations=10 \
|
||||
-o console_output_style=count \
|
||||
tests
|
||||
|
||||
deploy-docker:
|
||||
name: Build and publish docker containers
|
||||
if: github.repository == 'esphome/esphome'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
|
||||
strategy:
|
||||
matrix:
|
||||
arch: [amd64, armv7, aarch64]
|
||||
# Hassio dev image doesn't use esphome/esphome-hassio-$arch and uses base directly
|
||||
build_type: ["docker"]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set TAG
|
||||
run: |
|
||||
TAG="${GITHUB_SHA:0:7}"
|
||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
||||
- name: Set up env variables
|
||||
run: |
|
||||
base_version="2.6.0"
|
||||
|
||||
if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
|
||||
build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
|
||||
build_to="esphome/esphome-hassio-${{ matrix.arch }}"
|
||||
dockerfile="docker/Dockerfile.hassio"
|
||||
else
|
||||
build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
|
||||
build_to="esphome/esphome-${{ matrix.arch }}"
|
||||
dockerfile="docker/Dockerfile"
|
||||
fi
|
||||
|
||||
echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
|
||||
echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
|
||||
echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
|
||||
- name: Pull for cache
|
||||
run: |
|
||||
docker pull "${BUILD_TO}:dev" || true
|
||||
- name: Register QEMU binfmt
|
||||
run: docker run --rm --privileged multiarch/qemu-user-static:5.0.0-2 --reset -p yes
|
||||
- run: |
|
||||
docker build \
|
||||
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
||||
--build-arg "BUILD_VERSION=${TAG}" \
|
||||
--tag "${BUILD_TO}:${TAG}" \
|
||||
--tag "${BUILD_TO}:dev" \
|
||||
--cache-from "${BUILD_TO}:dev" \
|
||||
--file "${DOCKERFILE}" \
|
||||
.
|
||||
- name: Log in to docker hub
|
||||
env:
|
||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
||||
- run: |
|
||||
docker push "${BUILD_TO}:${TAG}"
|
||||
docker push "${BUILD_TO}:dev"
|
||||
|
||||
|
||||
deploy-docker-manifest:
|
||||
if: github.repository == 'esphome/esphome'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [deploy-docker]
|
||||
steps:
|
||||
- name: Enable experimental manifest support
|
||||
run: |
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
|
||||
- name: Set TAG
|
||||
run: |
|
||||
TAG="${GITHUB_SHA:0:7}"
|
||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
||||
- name: Log in to docker hub
|
||||
env:
|
||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
||||
- name: "Create the manifest"
|
||||
run: |
|
||||
docker manifest create esphome/esphome:${TAG} \
|
||||
esphome/esphome-aarch64:${TAG} \
|
||||
esphome/esphome-amd64:${TAG} \
|
||||
esphome/esphome-armv7:${TAG}
|
||||
docker manifest push esphome/esphome:${TAG}
|
||||
|
||||
docker manifest create esphome/esphome:dev \
|
||||
esphome/esphome-aarch64:${TAG} \
|
||||
esphome/esphome-amd64:${TAG} \
|
||||
esphome/esphome-armv7:${TAG}
|
||||
docker manifest push esphome/esphome:dev
|
.github/workflows/release.yml (vendored, new file, 327 lines)
@@ -0,0 +1,327 @@
|
||||
name: Publish Release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
# THE LINT/TEST JOBS ARE COPIED FROM ci.yaml
|
||||
|
||||
lint-clang-format:
|
||||
runs-on: ubuntu-latest
|
||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
||||
# doesn't have to be installed
|
||||
container: esphome/esphome-lint:latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Cache platformio intermediary files (like libraries etc)
|
||||
# Note: platformio platform versions should be cached via the esphome-lint image
|
||||
- name: Cache Platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: .pio
|
||||
key: lint-cpp-pio-${{ hashFiles('platformio.ini') }}
|
||||
restore-keys: |
|
||||
lint-cpp-pio-
|
||||
# Set up the pio project so that the cpp checks know how files are compiled
|
||||
# (build flags, libraries etc)
|
||||
- name: Set up platformio environment
|
||||
run: pio init --ide atom
|
||||
|
||||
- name: Run clang-format
|
||||
run: script/clang-format -i
|
||||
- name: Suggest changes
|
||||
run: script/ci-suggest-changes
|
||||
|
||||
lint-clang-tidy:
|
||||
runs-on: ubuntu-latest
|
||||
# cpp lint job runs with esphome-lint docker image so that clang-format-*
|
||||
# doesn't have to be installed
|
||||
container: esphome/esphome-lint:latest
|
||||
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
split: [1, 2, 3, 4]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Cache platformio intermediary files (like libraries etc)
|
||||
# Note: platformio platform versions should be cached via the esphome-lint image
|
||||
- name: Cache Platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: .pio
|
||||
key: lint-cpp-pio-${{ hashFiles('platformio.ini') }}
|
||||
restore-keys: |
|
||||
lint-cpp-pio-
|
||||
# Set up the pio project so that the cpp checks know how files are compiled
|
||||
# (build flags, libraries etc)
|
||||
- name: Set up platformio environment
|
||||
run: pio init --ide atom
|
||||
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
- name: Run clang-tidy
|
||||
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
|
||||
- name: Suggest changes
|
||||
run: script/ci-suggest-changes
|
||||
|
||||
lint-python:
|
||||
# Don't use the esphome-lint docker image because it may contain outdated requirements.
|
||||
# This way, all dependencies are cached via the cache action.
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
- name: Set up python environment
|
||||
run: script/setup
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- name: Lint Custom
|
||||
run: script/ci-custom.py
|
||||
- name: Lint Python
|
||||
run: script/lint-python
|
||||
- name: Lint CODEOWNERS
|
||||
run: script/build_codeowners.py --check
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- test1
|
||||
- test2
|
||||
- test3
|
||||
- test4
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
# Use per test platformio cache because tests have different platform versions
|
||||
- name: Cache ~/.platformio
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core_config.py') }}
|
||||
restore-keys: |
|
||||
test-home-platformio-${{ matrix.test }}-
|
||||
- name: Set up environment
|
||||
run: script/setup
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/gcc.json"
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- run: esphome tests/${{ matrix.test }}.yaml compile
|
||||
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.7'
|
||||
- name: Cache pip modules
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
|
||||
restore-keys: |
|
||||
esphome-pip-3.7-
|
||||
- name: Set up environment
|
||||
run: script/setup
|
||||
- name: Install Github Actions annotator
|
||||
run: pip install pytest-github-actions-annotate-failures
|
||||
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||
- name: Run pytest
|
||||
run: |
|
||||
pytest \
|
||||
-qq \
|
||||
--durations=10 \
|
||||
-o console_output_style=count \
|
||||
tests
|
||||
|
||||
deploy-pypi:
|
||||
name: Build and publish to PyPi
|
||||
if: github.repository == 'esphome/esphome'
|
||||
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Set up python environment
|
||||
run: |
|
||||
script/setup
|
||||
pip install setuptools wheel twine
|
||||
- name: Build
|
||||
run: python setup.py sdist bdist_wheel
|
||||
- name: Upload
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
||||
run: twine upload dist/*
|
||||
|
||||
deploy-docker:
|
||||
name: Build and publish docker containers
|
||||
if: github.repository == 'esphome/esphome'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
|
||||
strategy:
|
||||
matrix:
|
||||
arch: [amd64, armv7, aarch64]
|
||||
build_type: ["hassio", "docker"]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set TAG
|
||||
run: |
|
||||
TAG="${GITHUB_REF#refs/tags/v}"
|
||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
||||
- name: Set up env variables
|
||||
run: |
|
||||
base_version="2.6.0"
|
||||
|
||||
if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
|
||||
build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
|
||||
build_to="esphome/esphome-hassio-${{ matrix.arch }}"
|
||||
dockerfile="docker/Dockerfile.hassio"
|
||||
else
|
||||
build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
|
||||
build_to="esphome/esphome-${{ matrix.arch }}"
|
||||
dockerfile="docker/Dockerfile"
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event.release.prerelease }}" == "true" ]]; then
|
||||
cache_tag="beta"
|
||||
else
|
||||
cache_tag="latest"
|
||||
fi
|
||||
|
||||
# Set env variables so these values don't need to be calculated again
|
||||
echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
|
||||
echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
|
||||
echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
|
||||
echo "CACHE_TAG=${cache_tag}" >> $GITHUB_ENV
|
||||
- name: Pull for cache
|
||||
run: |
|
||||
docker pull "${BUILD_TO}:${CACHE_TAG}" || true
|
||||
- name: Register QEMU binfmt
|
||||
run: docker run --rm --privileged multiarch/qemu-user-static:5.0.0-2 --reset -p yes
|
||||
- run: |
|
||||
docker build \
|
||||
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
||||
--build-arg "BUILD_VERSION=${TAG}" \
|
||||
--tag "${BUILD_TO}:${TAG}" \
|
||||
--cache-from "${BUILD_TO}:${CACHE_TAG}" \
|
||||
--file "${DOCKERFILE}" \
|
||||
.
|
||||
- name: Log in to docker hub
|
||||
env:
|
||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
||||
- run: docker push "${BUILD_TO}:${TAG}"
|
||||
|
||||
# Always publish to beta tag (also full releases)
|
||||
- name: Publish docker beta tag
|
||||
run: |
|
||||
docker tag "${BUILD_TO}:${TAG}" "${BUILD_TO}:beta"
|
||||
docker push "${BUILD_TO}:beta"
|
||||
|
||||
- if: ${{ !github.event.release.prerelease }}
|
||||
name: Publish docker latest tag
|
||||
run: |
|
||||
docker tag "${BUILD_TO}:${TAG}" "${BUILD_TO}:latest"
|
||||
docker push "${BUILD_TO}:latest"
|
||||
|
||||
deploy-docker-manifest:
|
||||
if: github.repository == 'esphome/esphome'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [deploy-docker]
|
||||
steps:
|
||||
- name: Enable experimental manifest support
|
||||
run: |
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
|
||||
- name: Set TAG
|
||||
run: |
|
||||
TAG="${GITHUB_REF#refs/tags/v}"
|
||||
echo "TAG=${TAG}" >> $GITHUB_ENV
|
||||
- name: Log in to docker hub
|
||||
env:
|
||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
|
||||
- name: "Create the manifest"
|
||||
run: |
|
||||
docker manifest create esphome/esphome:${TAG} \
|
||||
esphome/esphome-aarch64:${TAG} \
|
||||
esphome/esphome-amd64:${TAG} \
|
||||
esphome/esphome-armv7:${TAG}
|
||||
docker manifest push esphome/esphome:${TAG}
|
||||
|
||||
- name: Publish docker beta tag
|
||||
run: |
|
||||
docker manifest create esphome/esphome:beta \
|
||||
esphome/esphome-aarch64:${TAG} \
|
||||
esphome/esphome-amd64:${TAG} \
|
||||
esphome/esphome-armv7:${TAG}
|
||||
docker manifest push esphome/esphome:beta
|
||||
|
||||
- name: Publish docker latest tag
|
||||
if: ${{ !github.event.release.prerelease }}
|
||||
run: |
|
||||
docker manifest create esphome/esphome:latest \
|
||||
esphome/esphome-aarch64:${TAG} \
|
||||
esphome/esphome-amd64:${TAG} \
|
||||
esphome/esphome-armv7:${TAG}
|
||||
docker manifest push esphome/esphome:latest
|
||||
|
||||
deploy-hassio-repo:
|
||||
if: github.repository == 'esphome/esphome'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [deploy-docker]
|
||||
steps:
|
||||
- env:
|
||||
TOKEN: ${{ secrets.DEPLOY_HASSIO_TOKEN }}
|
||||
run: |
|
||||
TAG="${GITHUB_REF#refs/tags/v}"
|
||||
curl \
|
||||
-u ":$TOKEN" \
|
||||
-X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
https://api.github.com/repos/esphome/hassio/actions/workflows/bump-version.yml/dispatches \
|
||||
-d "{\"ref\":\"master\",\"inputs\":{\"version\":\"$TAG\"}}"
|
.gitignore (vendored, modified, 102 lines)
@@ -6,6 +6,22 @@ __pycache__/
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Hide sublime text stuff
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
|
||||
# Intellij Idea
|
||||
.idea
|
||||
|
||||
# Hide some OS X stuff
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
Icon
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
@@ -25,12 +41,6 @@ wheels/
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
@@ -41,8 +51,10 @@ htmlcov/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
.esphome
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
cov.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
@@ -51,36 +63,9 @@ coverage.xml
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
@@ -90,18 +75,49 @@ ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.pio
|
||||
.vscode/
|
||||
!.vscode/tasks.json
|
||||
CMakeListsPrivate.txt
|
||||
CMakeLists.txt
|
||||
|
||||
# User-specific stuff:
|
||||
.idea/**/workspace.xml
|
||||
.idea/**/tasks.xml
|
||||
.idea/dictionaries
|
||||
|
||||
# Sensitive or high-churn files:
|
||||
.idea/**/dataSources/
|
||||
.idea/**/dataSources.ids
|
||||
.idea/**/dataSources.xml
|
||||
.idea/**/dataSources.local.xml
|
||||
.idea/**/dynamic.xml
|
||||
|
||||
# CMake
|
||||
cmake-build-debug/
|
||||
cmake-build-release/
|
||||
|
||||
CMakeCache.txt
|
||||
CMakeFiles
|
||||
CMakeScripts
|
||||
Testing
|
||||
Makefile
|
||||
cmake_install.cmake
|
||||
install_manifest.txt
|
||||
compile_commands.json
|
||||
CTestTestfile.cmake
|
||||
/*.cbp
|
||||
|
||||
.clang_complete
|
||||
.gcc-flags.json
|
||||
|
||||
config/
|
||||
tests/build/
|
||||
tests/.esphome/
|
||||
/.temp-clang-tidy.cpp
|
||||
.pio/
|
||||
|
.gitlab-ci.yml (deleted, 320 lines)
@@ -1,320 +0,0 @@
|
||||
---
|
||||
# Based on https://gitlab.com/hassio-addons/addon-node-red/blob/master/.gitlab-ci.yml
|
||||
variables:
|
||||
DOCKER_DRIVER: overlay2
|
||||
|
||||
stages:
|
||||
- lint
|
||||
- test
|
||||
- build
|
||||
- deploy
|
||||
|
||||
.lint: &lint
|
||||
stage: lint
|
||||
tags:
|
||||
- python2.7
|
||||
- esphomeyaml-lint
|
||||
|
||||
.test: &test
|
||||
stage: test
|
||||
before_script:
|
||||
- pip install -e .
|
||||
tags:
|
||||
- python2.7
|
||||
- esphomeyaml-test
|
||||
variables:
|
||||
TZ: UTC
|
||||
cache:
|
||||
paths:
|
||||
- tests/build
|
||||
|
||||
.docker-builder: &docker-builder
|
||||
before_script:
|
||||
- docker info
|
||||
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" "$CI_REGISTRY"
|
||||
services:
|
||||
- docker:dind
|
||||
tags:
|
||||
- hassio-builder
|
||||
|
||||
flake8:
|
||||
<<: *lint
|
||||
script:
|
||||
- flake8 esphomeyaml
|
||||
|
||||
pylint:
|
||||
<<: *lint
|
||||
script:
|
||||
- pylint esphomeyaml
|
||||
|
||||
test1:
|
||||
<<: *test
|
||||
script:
|
||||
- esphomeyaml tests/test1.yaml compile
|
||||
|
||||
test2:
|
||||
<<: *test
|
||||
script:
|
||||
- esphomeyaml tests/test2.yaml compile
|
||||
|
||||
.build-hassio: &build-hassio
|
||||
<<: *docker-builder
|
||||
stage: build
|
||||
script:
|
||||
- docker run --rm --privileged hassioaddons/qemu-user-static:latest
|
||||
- BUILD_FROM=homeassistant/${ADDON_ARCH}-base-ubuntu:latest
|
||||
- ADDON_VERSION="${CI_COMMIT_TAG#v}"
|
||||
- ADDON_VERSION="${ADDON_VERSION:-${CI_COMMIT_SHA:0:7}}"
|
||||
- ESPHOMELIB_VERSION="${ESPHOMELIB_VERSION:-dev}"
|
||||
- echo "Build from ${BUILD_FROM}"
|
||||
- echo "Add-on version ${ADDON_VERSION}"
|
||||
- echo "Esphomelib version ${ESPHOMELIB_VERSION}"
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev"
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
|
||||
- |
|
||||
docker build \
|
||||
--build-arg "BUILD_FROM=${BUILD_FROM}" \
|
||||
--build-arg "ADDON_ARCH=${ADDON_ARCH}" \
|
||||
--build-arg "ADDON_VERSION=${ADDON_VERSION}" \
|
||||
--build-arg "ESPHOMELIB_VERSION=${ESPHOMELIB_VERSION}" \
|
||||
--tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev" \
|
||||
--tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
--file "docker/Dockerfile.hassio" \
|
||||
.
|
||||
- |
|
||||
if [ "${DO_PUSH:-true}" = true ]; then
|
||||
echo "Pushing to CI registry"
|
||||
docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}
|
||||
docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev
|
||||
fi
|
||||
|
||||
# Generic deploy template
|
||||
.deploy-release: &deploy-release
|
||||
<<: *docker-builder
|
||||
stage: deploy
|
||||
script:
|
||||
- version="${CI_COMMIT_TAG#v}"
|
||||
- echo "Publishing release version ${version}"
|
||||
- docker pull "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
|
||||
- docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
|
||||
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
|
||||
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
|
||||
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
|
||||
|
||||
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
only:
|
||||
- /^v\d+\.\d+\.\d+$/
|
||||
except:
|
||||
- /^(?!master).+@/
|
||||
|
||||
.deploy-beta: &deploy-beta
|
||||
<<: *docker-builder
|
||||
stage: deploy
|
||||
script:
|
||||
- version="${CI_COMMIT_TAG#v}"
|
||||
- echo "Publishing beta version ${version}"
|
||||
- docker pull "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
|
||||
- docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
|
||||
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
|
||||
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
|
||||
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
|
||||
|
||||
- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- |
|
||||
docker tag \
|
||||
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
|
||||
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
|
||||
only:
|
||||
- /^v\d+\.\d+\.\d+b\d+$/
|
||||
except:
|
||||
- /^(?!rc).+@/
|
||||
|
||||
# Build jobs
|
||||
build:normal:
|
||||
<<: *docker-builder
|
||||
stage: build
|
||||
script:
|
||||
- docker build -t "${CI_REGISTRY}/esphomeyaml:dev" .
|
||||
|
||||
.build-hassio-edge: &build-hassio-edge
|
||||
<<: *build-hassio
|
||||
except:
|
||||
- /^v\d+\.\d+\.\d+$/
|
||||
- /^v\d+\.\d+\.\d+b\d+$/
|
||||
|
||||
.build-hassio-release: &build-hassio-release
|
||||
<<: *build-hassio
|
||||
only:
|
||||
- /^v\d+\.\d+\.\d+$/
|
||||
- /^v\d+\.\d+\.\d+b\d+$/
|
||||
|
||||
build:hassio-armhf-edge:
|
||||
<<: *build-hassio-edge
|
||||
variables:
|
||||
ADDON_ARCH: armhf
|
||||
DO_PUSH: "false"
|
||||
|
||||
build:hassio-armhf:
|
||||
<<: *build-hassio-release
|
||||
variables:
|
||||
ADDON_ARCH: armhf
|
||||
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||
|
||||
#build:hassio-aarch64-edge:
|
||||
# <<: *build-hassio-edge
|
||||
# variables:
|
||||
# ADDON_ARCH: aarch64
|
||||
# DO_PUSH: "false"
|
||||
|
||||
#build:hassio-aarch64:
|
||||
# <<: *build-hassio-release
|
||||
# variables:
|
||||
# ADDON_ARCH: aarch64
|
||||
# ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||
|
||||
build:hassio-i386-edge:
|
||||
<<: *build-hassio-edge
|
||||
variables:
|
||||
ADDON_ARCH: i386
|
||||
DO_PUSH: "false"
|
||||
|
||||
build:hassio-i386:
|
||||
<<: *build-hassio-release
|
||||
variables:
|
||||
ADDON_ARCH: i386
|
||||
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||
|
||||
build:hassio-amd64-edge:
|
||||
<<: *build-hassio-edge
|
||||
variables:
|
||||
ADDON_ARCH: amd64
|
||||
DO_PUSH: "false"
|
||||
|
||||
build:hassio-amd64:
|
||||
<<: *build-hassio-release
|
||||
variables:
|
||||
ADDON_ARCH: amd64
|
||||
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
|
||||
|
||||
# Deploy jobs
|
||||
deploy-release:armhf:
|
||||
<<: *deploy-release
|
||||
variables:
|
||||
ADDON_ARCH: armhf
|
||||
|
||||
deploy-beta:armhf:
|
||||
<<: *deploy-beta
|
||||
variables:
|
||||
ADDON_ARCH: armhf
|
||||
|
||||
#deploy-release:aarch64:
|
||||
# <<: *deploy-release
|
||||
# variables:
|
||||
# ADDON_ARCH: aarch64
|
||||
#
|
||||
#deploy-beta:aarch64:
|
||||
# <<: *deploy-beta
|
||||
# variables:
|
||||
# ADDON_ARCH: aarch64
|
||||
|
||||
deploy-release:i386:
|
||||
<<: *deploy-release
|
||||
variables:
|
||||
ADDON_ARCH: i386
|
||||
|
||||
deploy-beta:i386:
|
||||
<<: *deploy-beta
|
||||
variables:
|
||||
ADDON_ARCH: i386
|
||||
|
||||
deploy-release:amd64:
|
||||
<<: *deploy-release
|
||||
variables:
|
||||
ADDON_ARCH: amd64
|
||||
|
||||
deploy-beta:amd64:
|
||||
<<: *deploy-beta
|
||||
variables:
|
||||
ADDON_ARCH: amd64
|
||||
|
||||
.deploy-pypi: &deploy-pypi
|
||||
stage: deploy
|
||||
before_script:
|
||||
- pip install -e .
|
||||
- pip install twine
|
||||
script:
|
||||
- python setup.py sdist
|
||||
- twine upload dist/*
|
||||
tags:
|
||||
- python2.7
|
||||
- esphomeyaml-test
|
||||
|
||||
deploy-release:pypi:
|
||||
<<: *deploy-pypi
|
||||
only:
|
||||
- /^v\d+\.\d+\.\d+$/
|
||||
except:
|
||||
- /^(?!master).+@/
|
||||
|
||||
deploy-beta:pypi:
|
||||
<<: *deploy-pypi
|
||||
only:
|
||||
- /^v\d+\.\d+\.\d+b\d+$/
|
||||
except:
|
||||
- /^(?!rc).+@/
|
.gitpod.yml (new file, 6 lines)
@@ -0,0 +1,6 @@
ports:
  - port: 6052
    onOpen: open-preview
tasks:
  - before: pyenv local $(pyenv version | grep '^3\.' | cut -d ' ' -f 1) && script/setup
    command: python -m esphome config dashboard
.pre-commit-config.yaml (new file, 11 lines)
@@ -0,0 +1,11 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.4.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
      - id: check-added-large-files
      - id: flake8
.travis.yml (deleted, 20 lines)
@@ -1,20 +0,0 @@
sudo: false
language: python
python:
  - "2.7"
jobs:
  include:
    - name: "Lint"
      install:
        - pip install -r requirements.txt
        - pip install flake8==3.5.0 pylint==1.9.3 tzlocal pillow
      script:
        - flake8 esphomeyaml
        - pylint esphomeyaml
    - name: "Test"
      install:
        - pip install -e .
        - pip install tzlocal pillow
      script:
        - esphomeyaml tests/test1.yaml compile
        - esphomeyaml tests/test2.yaml compile
.vscode/tasks.json (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "run",
      "type": "shell",
      "command": "python3 -m esphome config dashboard",
      "problemMatcher": []
    }
  ]
}
CODEOWNERS (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
# This file is generated by script/build_codeowners.py
|
||||
# People marked here will be automatically requested for a review
|
||||
# when the code that they own is touched.
|
||||
#
|
||||
# Every time an issue is created with a label corresponding to an integration,
|
||||
# the integration's code owner is automatically notified.
|
||||
|
||||
# Core Code
|
||||
setup.py @esphome/core
|
||||
esphome/*.py @esphome/core
|
||||
esphome/core/* @esphome/core
|
||||
|
||||
# Integrations
|
||||
esphome/components/ac_dimmer/* @glmnet
|
||||
esphome/components/adc/* @esphome/core
|
||||
esphome/components/animation/* @syndlex
|
||||
esphome/components/api/* @OttoWinter
|
||||
esphome/components/async_tcp/* @OttoWinter
|
||||
esphome/components/atc_mithermometer/* @ahpohl
|
||||
esphome/components/bang_bang/* @OttoWinter
|
||||
esphome/components/binary_sensor/* @esphome/core
|
||||
esphome/components/canbus/* @danielschramm @mvturnho
|
||||
esphome/components/captive_portal/* @OttoWinter
|
||||
esphome/components/climate/* @esphome/core
|
||||
esphome/components/climate_ir/* @glmnet
|
||||
esphome/components/coolix/* @glmnet
|
||||
esphome/components/cover/* @esphome/core
|
||||
esphome/components/ct_clamp/* @jesserockz
|
||||
esphome/components/debug/* @OttoWinter
|
||||
esphome/components/dfplayer/* @glmnet
|
||||
esphome/components/dht/* @OttoWinter
|
||||
esphome/components/ds1307/* @badbadc0ffee
|
||||
esphome/components/exposure_notifications/* @OttoWinter
|
||||
esphome/components/ezo/* @ssieb
|
||||
esphome/components/fastled_base/* @OttoWinter
|
||||
esphome/components/globals/* @esphome/core
|
||||
esphome/components/gpio/* @esphome/core
|
||||
esphome/components/homeassistant/* @OttoWinter
|
||||
esphome/components/i2c/* @esphome/core
|
||||
esphome/components/inkplate6/* @jesserockz
|
||||
esphome/components/integration/* @OttoWinter
|
||||
esphome/components/interval/* @esphome/core
|
||||
esphome/components/json/* @OttoWinter
|
||||
esphome/components/ledc/* @OttoWinter
|
||||
esphome/components/light/* @esphome/core
|
||||
esphome/components/logger/* @esphome/core
|
||||
esphome/components/mcp23s08/* @SenexCrenshaw
|
||||
esphome/components/mcp23s17/* @SenexCrenshaw
|
||||
esphome/components/mcp2515/* @danielschramm @mvturnho
|
||||
esphome/components/mcp9808/* @k7hpn
|
||||
esphome/components/network/* @esphome/core
|
||||
esphome/components/nfc/* @jesserockz
|
||||
esphome/components/ota/* @esphome/core
|
||||
esphome/components/output/* @esphome/core
|
||||
esphome/components/pid/* @OttoWinter
|
||||
esphome/components/pn532/* @OttoWinter @jesserockz
|
||||
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
|
||||
esphome/components/pn532_spi/* @OttoWinter @jesserockz
|
||||
esphome/components/power_supply/* @esphome/core
|
||||
esphome/components/rc522/* @glmnet
|
||||
esphome/components/rc522_i2c/* @glmnet
|
||||
esphome/components/rc522_spi/* @glmnet
|
||||
esphome/components/restart/* @esphome/core
|
||||
esphome/components/rf_bridge/* @jesserockz
|
||||
esphome/components/rtttl/* @glmnet
|
||||
esphome/components/script/* @esphome/core
|
||||
esphome/components/sensor/* @esphome/core
|
||||
esphome/components/shutdown/* @esphome/core
|
||||
esphome/components/sim800l/* @glmnet
|
||||
esphome/components/spi/* @esphome/core
|
||||
esphome/components/ssd1322_base/* @kbx81
|
||||
esphome/components/ssd1322_spi/* @kbx81
|
||||
esphome/components/ssd1325_base/* @kbx81
|
||||
esphome/components/ssd1325_spi/* @kbx81
|
||||
esphome/components/ssd1327_base/* @kbx81
|
||||
esphome/components/ssd1327_i2c/* @kbx81
|
||||
esphome/components/ssd1327_spi/* @kbx81
|
||||
esphome/components/ssd1331_base/* @kbx81
|
||||
esphome/components/ssd1331_spi/* @kbx81
|
||||
esphome/components/ssd1351_base/* @kbx81
|
||||
esphome/components/ssd1351_spi/* @kbx81
|
||||
esphome/components/st7735/* @SenexCrenshaw
|
||||
esphome/components/st7789v/* @kbx81
|
||||
esphome/components/substitutions/* @esphome/core
|
||||
esphome/components/sun/* @OttoWinter
|
||||
esphome/components/switch/* @esphome/core
|
||||
esphome/components/tcl112/* @glmnet
|
||||
esphome/components/teleinfo/* @0hax
|
||||
esphome/components/thermostat/* @kbx81
|
||||
esphome/components/time/* @OttoWinter
|
||||
esphome/components/tm1637/* @glmnet
|
||||
esphome/components/tmp102/* @timsavage
|
||||
esphome/components/tuya/binary_sensor/* @jesserockz
|
||||
esphome/components/tuya/climate/* @jesserockz
|
||||
esphome/components/tuya/sensor/* @jesserockz
|
||||
esphome/components/tuya/switch/* @jesserockz
|
||||
esphome/components/uart/* @esphome/core
|
||||
esphome/components/ultrasonic/* @OttoWinter
|
||||
esphome/components/version/* @esphome/core
|
||||
esphome/components/web_server_base/* @OttoWinter
|
||||
esphome/components/whirlpool/* @glmnet
|
||||
esphome/components/xiaomi_lywsd03mmc/* @ahpohl
|
||||
esphome/components/xiaomi_mhoc401/* @vevsvevs
|
CONTRIBUTING.md (modified, 16 lines)
@@ -1,16 +1,16 @@
-# Contributing to esphomeyaml
+# Contributing to ESPHome

-esphomeyaml is a part of esphomelib and is responsible for reading in YAML configuration files,
+This python project is responsible for reading in YAML configuration files,
 converting them to C++ code. This code is then converted to a platformio project and compiled
-with [esphomelib](https://github.com/OttoWinter/esphomelib), the C++ framework behind the project.
+with [esphome-core](https://github.com/esphome/esphome-core), the C++ framework behind the project.

-For a detailed guide, please see https://esphomelib.com/esphomeyaml/guides/contributing.html#contributing-to-esphomeyaml
+For a detailed guide, please see https://esphome.io/guides/contributing.html#contributing-to-esphomeyaml

 Things to note when contributing:

 - Please test your changes :)
 - If a new feature is added or an existing user-facing feature is changed, you should also
-  update the [docs](https://github.com/OttoWinter/esphomedocs). See [contributing to esphomedocs](https://esphomelib.com/esphomeyaml/guides/contributing.html#contributing-to-esphomedocs)
+  update the [docs](https://github.com/esphome/esphome-docs). See [contributing to esphome-docs](https://esphome.io/guides/contributing.html#contributing-to-esphomedocs)
   for more information.
 - Please also update the tests in the `tests/` folder. You can do so by just adding a line in one of the YAML files
   which checks if your new feature compiles correctly.
Dockerfile (deleted, 29 lines)
@@ -1,29 +0,0 @@
ARG BUILD_FROM=python:2.7
FROM ${BUILD_FROM}
MAINTAINER Otto Winter <contact@otto-winter.com>

RUN apt-get update && apt-get install -y \
        python-pil \
        git \
    && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
    pip install --no-cache-dir --no-binary :all: platformio && \
    platformio settings set enable_telemetry No && \
    platformio settings set check_libraries_interval 1000000 && \
    platformio settings set check_platformio_interval 1000000 && \
    platformio settings set check_platforms_interval 1000000

ENV ESPHOMEYAML_OTA_HOST_PORT=6123
EXPOSE 6123
VOLUME /config
WORKDIR /usr/src/app

COPY docker/platformio.ini /pio/platformio.ini
RUN platformio run -d /pio; rm -rf /pio

COPY . .
RUN pip install --no-cache-dir --no-binary :all: -e . && \
    pip install --no-cache-dir --no-binary :all: tzlocal

WORKDIR /config
ENTRYPOINT ["esphomeyaml"]
CMD ["/config", "dashboard"]
LICENSE (modified, 692 lines)
@@ -1,6 +1,17 @@
|
||||
MIT License
|
||||
# ESPHome License
|
||||
|
||||
Copyright (c) 2018 Otto Winter
|
||||
Copyright (c) 2019 ESPHome
|
||||
|
||||
The ESPHome License is made up of two base licenses: MIT and the GNU GENERAL PUBLIC LICENSE.
|
||||
The C++/runtime codebase of the ESPHome project (file extensions .c, .cpp, .h, .hpp, .tcc, .ino) are
|
||||
published under the GPLv3 license. The python codebase and all other parts of this codebase are
|
||||
published under the MIT license.
|
||||
|
||||
Both MIT and GPLv3 licenses are attached to this document.
|
||||
|
||||
## MIT License
|
||||
|
||||
Copyright (c) 2019 ESPHome
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -19,3 +30,680 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
## GPLv3 License
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
|
@@ -1,4 +1,7 @@
|
||||
include LICENSE
|
||||
include README.md
|
||||
include esphomeyaml/dashboard/templates/index.html
|
||||
include esphomeyaml/dashboard/static/materialize-stepper.min.css
|
||||
include esphomeyaml/dashboard/static/materialize-stepper.min.js
|
||||
include requirements.txt
|
||||
include esphome/dashboard/templates/*.html
|
||||
recursive-include esphome/dashboard/static *.ico *.js *.css *.woff* LICENSE
|
||||
recursive-include esphome *.cpp *.h *.tcc
|
||||
recursive-include esphome LICENSE.txt
|
||||
|
39
README.md
@@ -1,38 +1,9 @@
|
||||
# esphomeyaml for [esphomelib](https://github.com/OttoWinter/esphomelib)
|
||||
# ESPHome [](https://travis-ci.org/esphome/esphome) [](https://discord.gg/KhAMKrd) [](https://GitHub.com/esphome/esphome/releases/)
|
||||
|
||||
### Getting Started Guide: https://esphomelib.com/esphomeyaml/guides/getting_started_command_line.html
|
||||
[](https://esphome.io/)
|
||||
|
||||
### Available Components: https://esphomelib.com/esphomeyaml/index.html
|
||||
**Documentation:** https://esphome.io/
|
||||
|
||||
esphomeyaml is the solution for your ESP8266/ESP32 projects with Home Assistant. It allows you to create **custom firmwares** for your microcontrollers with no programming experience required. All you need to know is the YAML configuration format which is also used by [Home Assistant](https://www.home-assistant.io).
|
||||
For issues, please go to [the issue tracker](https://github.com/esphome/issues/issues).
|
||||
|
||||
esphomeyaml will:
|
||||
|
||||
* Read your configuration file and warn you about potential errors (like using invalid pins).
|
||||
* Create a custom C++ sketch file for you using esphomeyaml's powerful C++ generation engine.
|
||||
* Compile the sketch file for you using [platformio](http://platformio.org/).
|
||||
* Upload the binary to your ESP via Over the Air updates.
|
||||
* Automatically start remote logs via MQTT.
|
||||
|
||||
And all of that with a single command 🎉:
|
||||
|
||||
```bash
|
||||
esphomeyaml configuration.yaml run
|
||||
```
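Since the only prerequisite named above is the YAML configuration format, here is a minimal sketch of what such a `configuration.yaml` could look like before running that command; the file name and all values are placeholders, and the keys are assumed to follow the getting-started guide linked above.

```bash
# Write an assumed minimal configuration and run it (all values are placeholders).
cat > configuration.yaml << 'EOF'
esphomeyaml:
  name: livingroom
  platform: ESP8266
  board: nodemcuv2

wifi:
  ssid: 'MyWiFi'
  password: 'VerySecurePassword'

mqtt:
  broker: '192.168.1.100'
EOF

esphomeyaml configuration.yaml run
```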
|
||||
|
||||
## Features
|
||||
|
||||
* **No programming experience required:** just edit YAML configuration
|
||||
files like you're used to with Home Assistant.
|
||||
* **Flexible:** Use [esphomelib](https://github.com/OttoWinter/esphomelib)'s powerful core to create custom sensors/outputs.
|
||||
* **Fast and efficient:** Written in C++ and keeps memory consumption to a minimum.
|
||||
* **Made for [Home Assistant](https://www.home-assistant.io):** Almost all [Home Assistant](https://www.home-assistant.io) features are supported out of the box. Including RGB lights and many more.
|
||||
* **Easy reproducible configuration:** No need to go through a long setup process for every single node. Just copy a configuration file and run a single command.
|
||||
* **Smart Over The Air Updates:** esphomeyaml has OTA updates deeply integrated into the system. It even automatically enters a recovery mode if a boot loop is detected.
|
||||
* **Powerful logging engine:** View colorful logs and debug issues remotely.
|
||||
* **Open Source**
|
||||
* For me: Makes documenting esphomelib's features a lot easier.
|
||||
|
||||
## Special Thanks
|
||||
|
||||
Special Thanks to the Home Assistant project. Lots of the code base of esphomeyaml is based off of Home Assistant, for example the loading and config validation code.
|
||||
For feature requests, please see [feature requests](https://github.com/esphome/feature-requests/issues).
|
||||
|
28
docker/Dockerfile
Normal file
@@ -0,0 +1,28 @@
|
||||
ARG BUILD_FROM=esphome/esphome-base-amd64:2.6.0
|
||||
FROM ${BUILD_FROM}
|
||||
|
||||
# First install requirements to leverage caching when requirements don't change
|
||||
COPY requirements.txt /
|
||||
RUN pip3 install --no-cache-dir -r /requirements.txt
|
||||
|
||||
# Then copy esphome and install
|
||||
COPY . .
|
||||
RUN pip3 install --no-cache-dir -e .
|
||||
|
||||
# Settings for dashboard
|
||||
ENV USERNAME="" PASSWORD=""
|
||||
|
||||
# Expose the dashboard to Docker
|
||||
EXPOSE 6052
|
||||
|
||||
# Run healthcheck (heartbeat)
|
||||
HEALTHCHECK --interval=5m --timeout=3s \
|
||||
CMD curl --fail http://localhost:6052 || exit 1
|
||||
|
||||
# The directory the user should mount their configuration files to
|
||||
WORKDIR /config
|
||||
# Set entrypoint to esphome so that the user doesn't have to type 'esphome'
|
||||
# in every docker command twice
|
||||
ENTRYPOINT ["esphome"]
|
||||
# When no arguments given, start the dashboard in the workdir
|
||||
CMD ["/config", "dashboard"]
|
@@ -1,30 +0,0 @@
|
||||
FROM multiarch/ubuntu-core:amd64-xenial
|
||||
|
||||
# setup locals
|
||||
RUN apt-get update && apt-get install -y \
|
||||
jq \
|
||||
git \
|
||||
python3-setuptools \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
# Install docker
|
||||
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
|
||||
RUN apt-get update && apt-get install -y \
|
||||
apt-transport-https \
|
||||
ca-certificates \
|
||||
curl \
|
||||
software-properties-common \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
|
||||
&& add-apt-repository "deb https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \
|
||||
&& apt-get update && apt-get install -y docker-ce \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# setup arm binary support
|
||||
RUN apt-get update && apt-get install -y \
|
||||
qemu-user-static \
|
||||
binfmt-support \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /data
|
13
docker/Dockerfile.dev
Normal file
@@ -0,0 +1,13 @@
|
||||
FROM esphome/esphome-base-amd64:2.6.0
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
python3-wheel \
|
||||
net-tools \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /workspaces
|
||||
ENV SHELL /bin/bash
|
@@ -1,42 +1,23 @@
|
||||
# Dockerfile for HassIO add-on
|
||||
ARG BUILD_FROM=homeassistant/amd64-base-ubuntu:latest
|
||||
ARG BUILD_FROM
|
||||
FROM ${BUILD_FROM}
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python \
|
||||
python-pip \
|
||||
python-setuptools \
|
||||
python-pil \
|
||||
git \
|
||||
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No && \
|
||||
platformio settings set check_libraries_interval 1000000 && \
|
||||
platformio settings set check_platformio_interval 1000000 && \
|
||||
platformio settings set check_platforms_interval 1000000
|
||||
# First install requirements to leverage caching when requirements don't change
|
||||
COPY requirements.txt /
|
||||
RUN pip3 install --no-cache-dir -r /requirements.txt
|
||||
|
||||
COPY docker/platformio.ini /pio/platformio.ini
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
# Copy root filesystem
|
||||
COPY docker/rootfs/ /
|
||||
|
||||
ARG ESPHOMELIB_VERSION="dev"
|
||||
RUN platformio lib -g install "https://github.com/OttoWinter/esphomelib.git#${ESPHOMELIB_VERSION}"
|
||||
# Then copy esphome and install
|
||||
COPY . /opt/esphome/
|
||||
RUN pip3 install --no-cache-dir -e /opt/esphome
|
||||
|
||||
COPY . .
|
||||
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||
pip install --no-cache-dir --no-binary :all: tzlocal
|
||||
|
||||
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||
|
||||
# Build arugments
|
||||
ARG ADDON_ARCH
|
||||
ARG ADDON_VERSION
|
||||
# Build arguments
|
||||
ARG BUILD_VERSION=dev
|
||||
|
||||
# Labels
|
||||
LABEL \
|
||||
io.hass.name="esphomeyaml" \
|
||||
io.hass.description="esphomeyaml HassIO add-on for intelligently managing all your ESP8266/ESP32 devices." \
|
||||
io.hass.arch="${ADDON_ARCH}" \
|
||||
io.hass.name="ESPHome" \
|
||||
io.hass.description="Manage and program ESP8266/ESP32 microcontrollers through YAML configuration files" \
|
||||
io.hass.type="addon" \
|
||||
io.hass.version="${ADDON_VERSION}" \
|
||||
io.hass.url="https://esphomelib.com/esphomeyaml/index.html" \
|
||||
maintainer="Otto Winter <contact@otto-winter.com>"
|
||||
io.hass.version=${BUILD_VERSION}
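For completeness, a hedged sketch of a local build of this add-on image; the Dockerfile path and the `BUILD_FROM` value are assumptions, only the two `ARG`s themselves come from the file above:

```bash
# Hypothetical local build; adjust the -f path and the base image to your setup.
docker build \
  --build-arg BUILD_FROM=esphome/esphome-hassio-base-amd64:2.6.0 \
  --build-arg BUILD_VERSION=dev \
  -f docker/Dockerfile.hassio \
  -t esphome-hassio-local .
```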
|
||||
|
@@ -1,6 +1,7 @@
|
||||
FROM python:2.7
|
||||
FROM esphome/esphome-lint-base:2.6.0
|
||||
|
||||
COPY requirements.txt /requirements.txt
|
||||
COPY requirements.txt requirements_test.txt /
|
||||
RUN pip3 install --no-cache-dir -r /requirements.txt -r /requirements_test.txt
|
||||
|
||||
RUN pip install -r /requirements.txt && \
|
||||
pip install flake8==3.5.0 pylint==1.9.3 tzlocal pillow
|
||||
VOLUME ["/esphome"]
|
||||
WORKDIR /esphome
|
||||
|
@@ -1,19 +0,0 @@
|
||||
FROM ubuntu:bionic
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python \
|
||||
python-pip \
|
||||
python-setuptools \
|
||||
python-pil \
|
||||
git \
|
||||
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/*rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No
|
||||
|
||||
COPY docker/platformio.ini /pio/platformio.ini
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
|
||||
COPY requirements.txt /requirements.txt
|
||||
|
||||
RUN pip install --no-cache-dir -r /requirements.txt && \
|
||||
pip install --no-cache-dir tzlocal pillow
|
@@ -1,12 +0,0 @@
|
||||
; This file allows the docker build file to install the required platformio
|
||||
; platforms
|
||||
|
||||
[env:espressif8266]
|
||||
platform = espressif8266
|
||||
board = nodemcuv2
|
||||
framework = arduino
|
||||
|
||||
[env:espressif32]
|
||||
platform = espressif32
|
||||
board = nodemcu-32s
|
||||
framework = arduino
|
41
docker/rootfs/etc/cont-init.d/10-requirements.sh
Executable file
@@ -0,0 +1,41 @@
|
||||
#!/usr/bin/with-contenv bashio
|
||||
# ==============================================================================
|
||||
# Community Hass.io Add-ons: ESPHome
|
||||
# This file checks if all user configuration requirements are met
|
||||
# ==============================================================================
|
||||
|
||||
# Check SSL requirements, if enabled
|
||||
if bashio::config.true 'ssl'; then
|
||||
if ! bashio::config.has_value 'certfile'; then
|
||||
bashio::fatal 'SSL is enabled, but no certfile was specified.'
|
||||
bashio::exit.nok
|
||||
fi
|
||||
|
||||
if ! bashio::config.has_value 'keyfile'; then
|
||||
bashio::fatal 'SSL is enabled, but no keyfile was specified'
|
||||
bashio::exit.nok
|
||||
fi
|
||||
|
||||
|
||||
certfile="/ssl/$(bashio::config 'certfile')"
|
||||
keyfile="/ssl/$(bashio::config 'keyfile')"
|
||||
|
||||
if ! bashio::fs.file_exists "${certfile}"; then
|
||||
if ! bashio::fs.file_exists "${keyfile}"; then
|
||||
# Both files are missing, let's print a friendlier error message
|
||||
bashio::log.fatal 'You enabled encrypted connections using the "ssl": true option.'
|
||||
bashio::log.fatal "However, the SSL files '${certfile}' and '${keyfile}'"
|
||||
bashio::log.fatal "were not found. If you're using Hass.io on your local network and don't want"
|
||||
bashio::log.fatal 'to encrypt connections to the ESPHome dashboard, you can manually disable'
|
||||
bashio::log.fatal 'SSL by setting "ssl" to false."'
|
||||
bashio::exit.nok
|
||||
fi
|
||||
bashio::log.fatal "The configured certfile '${certfile}' was not found."
|
||||
bashio::exit.nok
|
||||
fi
|
||||
|
||||
if ! bashio::fs.file_exists "/ssl/$(bashio::config 'keyfile')"; then
|
||||
bashio::log.fatal "The configured keyfile '${keyfile}' was not found."
|
||||
bashio::exit.nok
|
||||
fi
|
||||
fi
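To make the check above concrete, a small assumed example: with add-on options `"ssl": true`, `"certfile": "fullchain.pem"` and `"keyfile": "privkey.pem"`, the script resolves and verifies the two paths below (file names are placeholders):

```bash
# Mirrors the existence checks performed by the script above, with assumed file names.
test -f /ssl/fullchain.pem && test -f /ssl/privkey.pem \
  && echo "SSL files present" \
  || echo "SSL files missing - the add-on would refuse to start"
```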
|
34
docker/rootfs/etc/cont-init.d/20-nginx.sh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/usr/bin/with-contenv bashio
|
||||
# ==============================================================================
|
||||
# Community Hass.io Add-ons: ESPHome
|
||||
# Configures NGINX for use with ESPHome
|
||||
# ==============================================================================
|
||||
|
||||
declare certfile
|
||||
declare keyfile
|
||||
declare direct_port
|
||||
declare ingress_interface
|
||||
declare ingress_port
|
||||
|
||||
mkdir -p /var/log/nginx
|
||||
|
||||
direct_port=$(bashio::addon.port 6052)
|
||||
if bashio::var.has_value "${direct_port}"; then
|
||||
if bashio::config.true 'ssl'; then
|
||||
certfile=$(bashio::config 'certfile')
|
||||
keyfile=$(bashio::config 'keyfile')
|
||||
|
||||
mv /etc/nginx/servers/direct-ssl.disabled /etc/nginx/servers/direct.conf
|
||||
sed -i "s/%%certfile%%/${certfile}/g" /etc/nginx/servers/direct.conf
|
||||
sed -i "s/%%keyfile%%/${keyfile}/g" /etc/nginx/servers/direct.conf
|
||||
else
|
||||
mv /etc/nginx/servers/direct.disabled /etc/nginx/servers/direct.conf
|
||||
fi
|
||||
|
||||
sed -i "s/%%port%%/${direct_port}/g" /etc/nginx/servers/direct.conf
|
||||
fi
|
||||
|
||||
ingress_port=$(bashio::addon.ingress_port)
|
||||
ingress_interface=$(bashio::addon.ip_address)
|
||||
sed -i "s/%%port%%/${ingress_port}/g" /etc/nginx/servers/ingress.conf
|
||||
sed -i "s/%%interface%%/${ingress_interface}/g" /etc/nginx/servers/ingress.conf
|
23
docker/rootfs/etc/cont-init.d/30-esphome.sh
Executable file
@@ -0,0 +1,23 @@
|
||||
#!/usr/bin/with-contenv bashio
|
||||
# ==============================================================================
|
||||
# Community Hass.io Add-ons: ESPHome
|
||||
# This file installs the user-specified ESPHome version, if set
|
||||
# ==============================================================================
|
||||
|
||||
declare esphome_version
|
||||
|
||||
if bashio::config.has_value 'esphome_version'; then
|
||||
esphome_version=$(bashio::config 'esphome_version')
|
||||
if [[ $esphome_version == *":"* ]]; then
|
||||
IFS=':' read -r -a array <<< "$esphome_version"
|
||||
username=${array[0]}
|
||||
ref=${array[1]}
|
||||
else
|
||||
username="esphome"
|
||||
ref=$esphome_version
|
||||
fi
|
||||
full_url="https://github.com/${username}/esphome/archive/${ref}.zip"
|
||||
bashio::log.info "Installing esphome version '${esphome_version}' (${full_url})..."
|
||||
pip3 install -U --no-cache-dir "${full_url}" \
|
||||
|| bashio::exit.nok "Failed installing esphome pinned version."
|
||||
fi
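The `username:ref` parsing above turns the `esphome_version` option into a GitHub archive URL; an assumed example run outside the add-on:

```bash
# "dev" resolves to https://github.com/esphome/esphome/archive/dev.zip,
# "OttoWinter:mybranch" to https://github.com/OttoWinter/esphome/archive/mybranch.zip.
esphome_version="OttoWinter:mybranch"
if [[ $esphome_version == *":"* ]]; then
  IFS=':' read -r username ref <<< "$esphome_version"
else
  username="esphome"; ref=$esphome_version
fi
echo "https://github.com/${username}/esphome/archive/${ref}.zip"
```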
|
11
docker/rootfs/etc/cont-init.d/40-migrate.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/with-contenv bashio
|
||||
# ==============================================================================
|
||||
# Community Hass.io Add-ons: ESPHome
|
||||
# This file migrates the esphome config directory from the old path
|
||||
# ==============================================================================
|
||||
|
||||
if [[ ! -d /config/esphome && -d /config/esphomeyaml ]]; then
|
||||
echo "Moving config directory from /config/esphomeyaml to /config/esphome"
|
||||
mv /config/esphomeyaml /config/esphome
|
||||
mv /config/esphome/.esphomeyaml /config/esphome/.esphome
|
||||
fi
|
96
docker/rootfs/etc/nginx/includes/mime.types
Normal file
@@ -0,0 +1,96 @@
|
||||
types {
|
||||
text/html html htm shtml;
|
||||
text/css css;
|
||||
text/xml xml;
|
||||
image/gif gif;
|
||||
image/jpeg jpeg jpg;
|
||||
application/javascript js;
|
||||
application/atom+xml atom;
|
||||
application/rss+xml rss;
|
||||
|
||||
text/mathml mml;
|
||||
text/plain txt;
|
||||
text/vnd.sun.j2me.app-descriptor jad;
|
||||
text/vnd.wap.wml wml;
|
||||
text/x-component htc;
|
||||
|
||||
image/png png;
|
||||
image/svg+xml svg svgz;
|
||||
image/tiff tif tiff;
|
||||
image/vnd.wap.wbmp wbmp;
|
||||
image/webp webp;
|
||||
image/x-icon ico;
|
||||
image/x-jng jng;
|
||||
image/x-ms-bmp bmp;
|
||||
|
||||
font/woff woff;
|
||||
font/woff2 woff2;
|
||||
|
||||
application/java-archive jar war ear;
|
||||
application/json json;
|
||||
application/mac-binhex40 hqx;
|
||||
application/msword doc;
|
||||
application/pdf pdf;
|
||||
application/postscript ps eps ai;
|
||||
application/rtf rtf;
|
||||
application/vnd.apple.mpegurl m3u8;
|
||||
application/vnd.google-earth.kml+xml kml;
|
||||
application/vnd.google-earth.kmz kmz;
|
||||
application/vnd.ms-excel xls;
|
||||
application/vnd.ms-fontobject eot;
|
||||
application/vnd.ms-powerpoint ppt;
|
||||
application/vnd.oasis.opendocument.graphics odg;
|
||||
application/vnd.oasis.opendocument.presentation odp;
|
||||
application/vnd.oasis.opendocument.spreadsheet ods;
|
||||
application/vnd.oasis.opendocument.text odt;
|
||||
application/vnd.openxmlformats-officedocument.presentationml.presentation
|
||||
pptx;
|
||||
application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
|
||||
xlsx;
|
||||
application/vnd.openxmlformats-officedocument.wordprocessingml.document
|
||||
docx;
|
||||
application/vnd.wap.wmlc wmlc;
|
||||
application/x-7z-compressed 7z;
|
||||
application/x-cocoa cco;
|
||||
application/x-java-archive-diff jardiff;
|
||||
application/x-java-jnlp-file jnlp;
|
||||
application/x-makeself run;
|
||||
application/x-perl pl pm;
|
||||
application/x-pilot prc pdb;
|
||||
application/x-rar-compressed rar;
|
||||
application/x-redhat-package-manager rpm;
|
||||
application/x-sea sea;
|
||||
application/x-shockwave-flash swf;
|
||||
application/x-stuffit sit;
|
||||
application/x-tcl tcl tk;
|
||||
application/x-x509-ca-cert der pem crt;
|
||||
application/x-xpinstall xpi;
|
||||
application/xhtml+xml xhtml;
|
||||
application/xspf+xml xspf;
|
||||
application/zip zip;
|
||||
|
||||
application/octet-stream bin exe dll;
|
||||
application/octet-stream deb;
|
||||
application/octet-stream dmg;
|
||||
application/octet-stream iso img;
|
||||
application/octet-stream msi msp msm;
|
||||
|
||||
audio/midi mid midi kar;
|
||||
audio/mpeg mp3;
|
||||
audio/ogg ogg;
|
||||
audio/x-m4a m4a;
|
||||
audio/x-realaudio ra;
|
||||
|
||||
video/3gpp 3gpp 3gp;
|
||||
video/mp2t ts;
|
||||
video/mp4 mp4;
|
||||
video/mpeg mpeg mpg;
|
||||
video/quicktime mov;
|
||||
video/webm webm;
|
||||
video/x-flv flv;
|
||||
video/x-m4v m4v;
|
||||
video/x-mng mng;
|
||||
video/x-ms-asf asx asf;
|
||||
video/x-ms-wmv wmv;
|
||||
video/x-msvideo avi;
|
||||
}
|
16
docker/rootfs/etc/nginx/includes/proxy_params.conf
Normal file
@@ -0,0 +1,16 @@
|
||||
proxy_http_version 1.1;
|
||||
proxy_ignore_client_abort off;
|
||||
proxy_read_timeout 86400s;
|
||||
proxy_redirect off;
|
||||
proxy_send_timeout 86400s;
|
||||
proxy_max_temp_file_size 0;
|
||||
|
||||
proxy_set_header Accept-Encoding "";
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-NginX-Proxy true;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Authorization "";
|
6
docker/rootfs/etc/nginx/includes/server_params.conf
Normal file
@@ -0,0 +1,6 @@
|
||||
root /dev/null;
|
||||
server_name $hostname;
|
||||
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
add_header X-XSS-Protection "1; mode=block";
|
||||
add_header X-Robots-Tag none;
|
9
docker/rootfs/etc/nginx/includes/ssl_params.conf
Normal file
@@ -0,0 +1,9 @@
|
||||
ssl_protocols TLSv1.2;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:DHE-RSA-AES256-SHA;
|
||||
ssl_ecdh_curve secp384r1;
|
||||
ssl_session_timeout 10m;
|
||||
ssl_session_cache shared:SSL:10m;
|
||||
ssl_session_tickets off;
|
||||
ssl_stapling on;
|
||||
ssl_stapling_verify on;
|
33
docker/rootfs/etc/nginx/nginx.conf
Normal file
@@ -0,0 +1,33 @@
|
||||
daemon off;
|
||||
user root;
|
||||
pid /var/run/nginx.pid;
|
||||
worker_processes 1;
|
||||
# Hass.io addon log
|
||||
error_log /proc/1/fd/1 error;
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/includes/mime.types;
|
||||
access_log stdout;
|
||||
default_type application/octet-stream;
|
||||
gzip on;
|
||||
keepalive_timeout 65;
|
||||
sendfile on;
|
||||
server_tokens off;
|
||||
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
# Use Hass.io supervisor as resolver
|
||||
resolver 172.30.32.2;
|
||||
|
||||
upstream esphome {
|
||||
server unix:/var/run/esphome.sock;
|
||||
}
|
||||
|
||||
include /etc/nginx/servers/*.conf;
|
||||
}
|
22  docker/rootfs/etc/nginx/servers/direct-ssl.disabled  Normal file
@@ -0,0 +1,22 @@
server {
    listen %%port%% default_server ssl http2;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;
    include /etc/nginx/includes/ssl_params.conf;

    ssl on;
    ssl_certificate /ssl/%%certfile%%;
    ssl_certificate_key /ssl/%%keyfile%%;

    # Clear Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "";

    # Redirect http requests to https on the same port.
    # https://rageagainstshell.com/2016/11/redirect-http-to-https-on-the-same-port-in-nginx/
    error_page 497 https://$http_host$request_uri;

    location / {
        proxy_pass http://esphome;
    }
}
12  docker/rootfs/etc/nginx/servers/direct.disabled  Normal file
@@ -0,0 +1,12 @@
server {
    listen %%port%% default_server;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;
    # Clear Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "";

    location / {
        proxy_pass http://esphome;
    }
}
16  docker/rootfs/etc/nginx/servers/ingress.conf  Normal file
@@ -0,0 +1,16 @@
server {
    listen %%interface%%:%%port%% default_server;

    include /etc/nginx/includes/server_params.conf;
    include /etc/nginx/includes/proxy_params.conf;
    # Set Hass.io Ingress header
    proxy_set_header X-Hassio-Ingress "YES";

    location / {
        # Only allow from Hass.io supervisor
        allow 172.30.32.2;
        deny all;

        proxy_pass http://esphome;
    }
}
9  docker/rootfs/etc/services.d/esphome/finish  Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when ESPHome fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }

s6-svscanctl -t /var/run/s6/services
26  docker/rootfs/etc/services.d/esphome/run  Executable file
@@ -0,0 +1,26 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the ESPHome dashboard
# ==============================================================================

export ESPHOME_IS_HASSIO=true

if bashio::config.true 'leave_front_door_open'; then
    export DISABLE_HA_AUTHENTICATION=true
fi

if bashio::config.true 'streamer_mode'; then
    export ESPHOME_STREAMER_MODE=true
fi

if bashio::config.true 'status_use_ping'; then
    export ESPHOME_DASHBOARD_USE_PING=true
fi

if bashio::config.has_value 'relative_url'; then
    export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
fi

bashio::log.info "Starting ESPHome dashboard..."
exec esphome /config/esphome dashboard --socket /var/run/esphome.sock --hassio
9  docker/rootfs/etc/services.d/nginx/finish  Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }

s6-svscanctl -t /var/run/s6/services
14  docker/rootfs/etc/services.d/nginx/run  Executable file
@@ -0,0 +1,14 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the NGINX proxy
# ==============================================================================

bashio::log.info "Waiting for dashboard to come up..."

while [[ ! -S /var/run/esphome.sock ]]; do
    sleep 0.5
done

bashio::log.info "Starting NGINX..."
exec nginx
566
esphome/__main__.py
Normal file
@@ -0,0 +1,566 @@
|
||||
import argparse
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
from esphome import const, writer, yaml_util
|
||||
import esphome.codegen as cg
|
||||
from esphome.config import iter_components, read_config, strip_default_ids
|
||||
from esphome.const import CONF_BAUD_RATE, CONF_BROKER, CONF_LOGGER, CONF_OTA, \
|
||||
CONF_PASSWORD, CONF_PORT, CONF_ESPHOME, CONF_PLATFORMIO_OPTIONS
|
||||
from esphome.core import CORE, EsphomeError, coroutine, coroutine_with_priority
|
||||
from esphome.helpers import color, indent
|
||||
from esphome.util import run_external_command, run_external_process, safe_print, list_yaml_files, \
|
||||
get_serial_ports
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def choose_prompt(options):
|
||||
if not options:
|
||||
raise EsphomeError("Found no valid options for upload/logging, please make sure relevant "
|
||||
"sections (ota, api, mqtt, ...) are in your configuration and/or the "
|
||||
"device is plugged in.")
|
||||
|
||||
if len(options) == 1:
|
||||
return options[0][1]
|
||||
|
||||
safe_print("Found multiple options, please choose one:")
|
||||
for i, (desc, _) in enumerate(options):
|
||||
safe_print(f" [{i+1}] {desc}")
|
||||
|
||||
while True:
|
||||
opt = input('(number): ')
|
||||
if opt in options:
|
||||
opt = options.index(opt)
|
||||
break
|
||||
try:
|
||||
opt = int(opt)
|
||||
if opt < 1 or opt > len(options):
|
||||
raise ValueError
|
||||
break
|
||||
except ValueError:
|
||||
safe_print(color('red', f"Invalid option: '{opt}'"))
|
||||
return options[opt - 1][1]
|
||||
|
||||
|
||||
def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api):
|
||||
options = []
|
||||
for port in get_serial_ports():
|
||||
options.append((f"{port.path} ({port.description})", port.path))
|
||||
if (show_ota and 'ota' in CORE.config) or (show_api and 'api' in CORE.config):
|
||||
options.append((f"Over The Air ({CORE.address})", CORE.address))
|
||||
if default == 'OTA':
|
||||
return CORE.address
|
||||
if show_mqtt and 'mqtt' in CORE.config:
|
||||
options.append(("MQTT ({})".format(CORE.config['mqtt'][CONF_BROKER]), 'MQTT'))
|
||||
if default == 'OTA':
|
||||
return 'MQTT'
|
||||
if default is not None:
|
||||
return default
|
||||
if check_default is not None and check_default in [opt[1] for opt in options]:
|
||||
return check_default
|
||||
return choose_prompt(options)
|
||||
|
||||
|
||||
def get_port_type(port):
|
||||
if port.startswith('/') or port.startswith('COM'):
|
||||
return 'SERIAL'
|
||||
if port == 'MQTT':
|
||||
return 'MQTT'
|
||||
return 'NETWORK'
|
||||
|
||||
|
||||
def run_miniterm(config, port):
|
||||
import serial
|
||||
from esphome import platformio_api
|
||||
|
||||
if CONF_LOGGER not in config:
|
||||
_LOGGER.info("Logger is not enabled. Not starting UART logs.")
|
||||
return
|
||||
baud_rate = config['logger'][CONF_BAUD_RATE]
|
||||
    if baud_rate == 0:
        _LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
        return
|
||||
_LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)
|
||||
|
||||
backtrace_state = False
|
||||
with serial.Serial(port, baudrate=baud_rate) as ser:
|
||||
while True:
|
||||
try:
|
||||
raw = ser.readline()
|
||||
except serial.SerialException:
|
||||
_LOGGER.error("Serial port closed!")
|
||||
return
|
||||
line = raw.replace(b'\r', b'').replace(b'\n', b'').decode('utf8', 'backslashreplace')
|
||||
time = datetime.now().time().strftime('[%H:%M:%S]')
|
||||
message = time + line
|
||||
safe_print(message)
|
||||
|
||||
backtrace_state = platformio_api.process_stacktrace(
|
||||
config, line, backtrace_state=backtrace_state)
|
||||
|
||||
|
||||
def wrap_to_code(name, comp):
|
||||
coro = coroutine(comp.to_code)
|
||||
|
||||
@functools.wraps(comp.to_code)
|
||||
@coroutine_with_priority(coro.priority)
|
||||
def wrapped(conf):
|
||||
cg.add(cg.LineComment(f"{name}:"))
|
||||
if comp.config_schema is not None:
|
||||
conf_str = yaml_util.dump(conf)
|
||||
conf_str = conf_str.replace('//', '')
|
||||
cg.add(cg.LineComment(indent(conf_str)))
|
||||
yield coro(conf)
|
||||
|
||||
return wrapped
|
||||
|
||||
|
||||
def write_cpp(config):
|
||||
generate_cpp_contents(config)
|
||||
return write_cpp_file()
|
||||
|
||||
|
||||
def generate_cpp_contents(config):
|
||||
_LOGGER.info("Generating C++ source...")
|
||||
|
||||
for name, component, conf in iter_components(CORE.config):
|
||||
if component.to_code is not None:
|
||||
coro = wrap_to_code(name, component)
|
||||
CORE.add_job(coro, conf)
|
||||
|
||||
CORE.flush_tasks()
|
||||
|
||||
|
||||
def write_cpp_file():
|
||||
writer.write_platformio_project()
|
||||
|
||||
code_s = indent(CORE.cpp_main_section)
|
||||
writer.write_cpp(code_s)
|
||||
return 0
|
||||
|
||||
|
||||
def compile_program(args, config):
|
||||
from esphome import platformio_api
|
||||
|
||||
_LOGGER.info("Compiling app...")
|
||||
return platformio_api.run_compile(config, CORE.verbose)
|
||||
|
||||
|
||||
def upload_using_esptool(config, port):
|
||||
path = CORE.firmware_bin
|
||||
first_baudrate = config[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS].get('upload_speed', 460800)
|
||||
|
||||
def run_esptool(baud_rate):
|
||||
cmd = ['esptool.py', '--before', 'default_reset', '--after', 'hard_reset',
|
||||
'--baud', str(baud_rate),
|
||||
'--chip', 'esp8266', '--port', port, 'write_flash', '0x0', path]
|
||||
|
||||
if os.environ.get('ESPHOME_USE_SUBPROCESS') is None:
|
||||
import esptool
|
||||
# pylint: disable=protected-access
|
||||
return run_external_command(esptool._main, *cmd)
|
||||
|
||||
return run_external_process(*cmd)
|
||||
|
||||
rc = run_esptool(first_baudrate)
|
||||
if rc == 0 or first_baudrate == 115200:
|
||||
return rc
|
||||
# Try with 115200 baud rate, with some serial chips the faster baud rates do not work well
|
||||
_LOGGER.info("Upload with baud rate %s failed. Trying again with baud rate 115200.",
|
||||
first_baudrate)
|
||||
return run_esptool(115200)
|
||||
|
||||
|
||||
def upload_program(config, args, host):
|
||||
# if upload is to a serial port use platformio, otherwise assume ota
|
||||
if get_port_type(host) == 'SERIAL':
|
||||
from esphome import platformio_api
|
||||
|
||||
if CORE.is_esp8266:
|
||||
return upload_using_esptool(config, host)
|
||||
return platformio_api.run_upload(config, CORE.verbose, host)
|
||||
|
||||
from esphome import espota2
|
||||
|
||||
if CONF_OTA not in config:
|
||||
raise EsphomeError("Cannot upload Over the Air as the config does not include the ota: "
|
||||
"component")
|
||||
|
||||
ota_conf = config[CONF_OTA]
|
||||
remote_port = ota_conf[CONF_PORT]
|
||||
password = ota_conf[CONF_PASSWORD]
|
||||
return espota2.run_ota(host, remote_port, password, CORE.firmware_bin)
|
||||
|
||||
|
||||
def show_logs(config, args, port):
|
||||
if 'logger' not in config:
|
||||
raise EsphomeError("Logger is not configured!")
|
||||
if get_port_type(port) == 'SERIAL':
|
||||
run_miniterm(config, port)
|
||||
return 0
|
||||
if get_port_type(port) == 'NETWORK' and 'api' in config:
|
||||
from esphome.api.client import run_logs
|
||||
|
||||
return run_logs(config, port)
|
||||
if get_port_type(port) == 'MQTT' and 'mqtt' in config:
|
||||
from esphome import mqtt
|
||||
|
||||
return mqtt.show_logs(config, args.topic, args.username, args.password, args.client_id)
|
||||
|
||||
raise EsphomeError("No remote or local logging method configured (api/mqtt/logger)")
|
||||
|
||||
|
||||
def clean_mqtt(config, args):
|
||||
from esphome import mqtt
|
||||
|
||||
return mqtt.clear_topic(config, args.topic, args.username, args.password, args.client_id)
|
||||
|
||||
|
||||
def setup_log(debug=False, quiet=False):
|
||||
if debug:
|
||||
log_level = logging.DEBUG
|
||||
CORE.verbose = True
|
||||
elif quiet:
|
||||
log_level = logging.CRITICAL
|
||||
else:
|
||||
log_level = logging.INFO
|
||||
logging.basicConfig(level=log_level)
|
||||
fmt = "%(levelname)s %(message)s"
|
||||
colorfmt = f"%(log_color)s{fmt}%(reset)s"
|
||||
datefmt = '%H:%M:%S'
|
||||
|
||||
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||
|
||||
try:
|
||||
import colorama
|
||||
colorama.init(strip=True)
|
||||
|
||||
from colorlog import ColoredFormatter
|
||||
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
|
||||
colorfmt,
|
||||
datefmt=datefmt,
|
||||
reset=True,
|
||||
log_colors={
|
||||
'DEBUG': 'cyan',
|
||||
'INFO': 'green',
|
||||
'WARNING': 'yellow',
|
||||
'ERROR': 'red',
|
||||
'CRITICAL': 'red',
|
||||
}
|
||||
))
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def command_wizard(args):
|
||||
from esphome import wizard
|
||||
|
||||
return wizard.wizard(args.configuration[0])
|
||||
|
||||
|
||||
def command_config(args, config):
|
||||
_LOGGER.info("Configuration is valid!")
|
||||
if not CORE.verbose:
|
||||
config = strip_default_ids(config)
|
||||
safe_print(yaml_util.dump(config))
|
||||
return 0
|
||||
|
||||
|
||||
def command_vscode(args):
|
||||
from esphome import vscode
|
||||
|
||||
CORE.config_path = args.configuration[0]
|
||||
vscode.read_config(args)
|
||||
|
||||
|
||||
def command_compile(args, config):
|
||||
exit_code = write_cpp(config)
|
||||
if exit_code != 0:
|
||||
return exit_code
|
||||
if args.only_generate:
|
||||
_LOGGER.info("Successfully generated source code.")
|
||||
return 0
|
||||
exit_code = compile_program(args, config)
|
||||
if exit_code != 0:
|
||||
return exit_code
|
||||
_LOGGER.info("Successfully compiled program.")
|
||||
return 0
|
||||
|
||||
|
||||
def command_upload(args, config):
|
||||
port = choose_upload_log_host(default=args.upload_port, check_default=None,
|
||||
show_ota=True, show_mqtt=False, show_api=False)
|
||||
exit_code = upload_program(config, args, port)
|
||||
if exit_code != 0:
|
||||
return exit_code
|
||||
_LOGGER.info("Successfully uploaded program.")
|
||||
return 0
|
||||
|
||||
|
||||
def command_logs(args, config):
|
||||
port = choose_upload_log_host(default=args.serial_port, check_default=None,
|
||||
show_ota=False, show_mqtt=True, show_api=True)
|
||||
return show_logs(config, args, port)
|
||||
|
||||
|
||||
def command_run(args, config):
|
||||
exit_code = write_cpp(config)
|
||||
if exit_code != 0:
|
||||
return exit_code
|
||||
exit_code = compile_program(args, config)
|
||||
if exit_code != 0:
|
||||
return exit_code
|
||||
_LOGGER.info("Successfully compiled program.")
|
||||
port = choose_upload_log_host(default=args.upload_port, check_default=None,
|
||||
show_ota=True, show_mqtt=False, show_api=True)
|
||||
exit_code = upload_program(config, args, port)
|
||||
if exit_code != 0:
|
||||
return exit_code
|
||||
_LOGGER.info("Successfully uploaded program.")
|
||||
if args.no_logs:
|
||||
return 0
|
||||
port = choose_upload_log_host(default=args.upload_port, check_default=port,
|
||||
show_ota=False, show_mqtt=True, show_api=True)
|
||||
return show_logs(config, args, port)
|
||||
|
||||
|
||||
def command_clean_mqtt(args, config):
|
||||
return clean_mqtt(config, args)
|
||||
|
||||
|
||||
def command_mqtt_fingerprint(args, config):
|
||||
from esphome import mqtt
|
||||
|
||||
return mqtt.get_fingerprint(config)
|
||||
|
||||
|
||||
def command_version(args):
|
||||
safe_print(f"Version: {const.__version__}")
|
||||
return 0
|
||||
|
||||
|
||||
def command_clean(args, config):
|
||||
try:
|
||||
writer.clean_build()
|
||||
except OSError as err:
|
||||
_LOGGER.error("Error deleting build files: %s", err)
|
||||
return 1
|
||||
_LOGGER.info("Done!")
|
||||
return 0
|
||||
|
||||
|
||||
def command_dashboard(args):
|
||||
from esphome.dashboard import dashboard
|
||||
|
||||
return dashboard.start_web_server(args)
|
||||
|
||||
|
||||
def command_update_all(args):
|
||||
import click
|
||||
|
||||
success = {}
|
||||
files = list_yaml_files(args.configuration[0])
|
||||
twidth = 60
|
||||
|
||||
def print_bar(middle_text):
|
||||
middle_text = f" {middle_text} "
|
||||
width = len(click.unstyle(middle_text))
|
||||
half_line = "=" * ((twidth - width) // 2)
|
||||
click.echo(f"{half_line}{middle_text}{half_line}")
|
||||
|
||||
for f in files:
|
||||
print("Updating {}".format(color('cyan', f)))
|
||||
print('-' * twidth)
|
||||
print()
|
||||
rc = run_external_process('esphome', '--dashboard', f, 'run', '--no-logs', '--upload-port',
|
||||
'OTA')
|
||||
if rc == 0:
|
||||
print_bar("[{}] {}".format(color('bold_green', 'SUCCESS'), f))
|
||||
success[f] = True
|
||||
else:
|
||||
print_bar("[{}] {}".format(color('bold_red', 'ERROR'), f))
|
||||
success[f] = False
|
||||
|
||||
print()
|
||||
print()
|
||||
print()
|
||||
|
||||
print_bar('[{}]'.format(color('bold_white', 'SUMMARY')))
|
||||
failed = 0
|
||||
for f in files:
|
||||
if success[f]:
|
||||
print(" - {}: {}".format(f, color('green', 'SUCCESS')))
|
||||
else:
|
||||
print(" - {}: {}".format(f, color('bold_red', 'FAILED')))
|
||||
failed += 1
|
||||
return failed
|
||||
|
||||
|
||||
PRE_CONFIG_ACTIONS = {
|
||||
'wizard': command_wizard,
|
||||
'version': command_version,
|
||||
'dashboard': command_dashboard,
|
||||
'vscode': command_vscode,
|
||||
'update-all': command_update_all,
|
||||
}
|
||||
|
||||
POST_CONFIG_ACTIONS = {
|
||||
'config': command_config,
|
||||
'compile': command_compile,
|
||||
'upload': command_upload,
|
||||
'logs': command_logs,
|
||||
'run': command_run,
|
||||
'clean-mqtt': command_clean_mqtt,
|
||||
'mqtt-fingerprint': command_mqtt_fingerprint,
|
||||
'clean': command_clean,
|
||||
}
|
||||
|
||||
|
||||
def parse_args(argv):
|
||||
parser = argparse.ArgumentParser(description=f'ESPHome v{const.__version__}')
|
||||
parser.add_argument('-v', '--verbose', help="Enable verbose esphome logs.",
|
||||
action='store_true')
|
||||
parser.add_argument('-q', '--quiet', help="Disable all esphome logs.",
|
||||
action='store_true')
|
||||
parser.add_argument('--dashboard', help=argparse.SUPPRESS, action='store_true')
|
||||
parser.add_argument('-s', '--substitution', nargs=2, action='append',
|
||||
help='Add a substitution', metavar=('key', 'value'))
|
||||
parser.add_argument('configuration', help='Your YAML configuration file.', nargs='*')
|
||||
|
||||
subparsers = parser.add_subparsers(help='Commands', dest='command')
|
||||
subparsers.required = True
|
||||
subparsers.add_parser('config', help='Validate the configuration and spit it out.')
|
||||
|
||||
parser_compile = subparsers.add_parser('compile',
|
||||
help='Read the configuration and compile a program.')
|
||||
parser_compile.add_argument('--only-generate',
|
||||
help="Only generate source code, do not compile.",
|
||||
action='store_true')
|
||||
|
||||
parser_upload = subparsers.add_parser('upload', help='Validate the configuration '
|
||||
'and upload the latest binary.')
|
||||
parser_upload.add_argument('--upload-port', help="Manually specify the upload port to use. "
|
||||
"For example /dev/cu.SLAB_USBtoUART.")
|
||||
|
||||
parser_logs = subparsers.add_parser('logs', help='Validate the configuration '
|
||||
'and show all MQTT logs.')
|
||||
parser_logs.add_argument('--topic', help='Manually set the topic to subscribe to.')
|
||||
parser_logs.add_argument('--username', help='Manually set the username.')
|
||||
parser_logs.add_argument('--password', help='Manually set the password.')
|
||||
parser_logs.add_argument('--client-id', help='Manually set the client id.')
|
||||
parser_logs.add_argument('--serial-port', help="Manually specify a serial port to use. "
|
||||
"For example /dev/cu.SLAB_USBtoUART.")
|
||||
|
||||
parser_run = subparsers.add_parser('run', help='Validate the configuration, create a binary, '
|
||||
'upload it, and start MQTT logs.')
|
||||
parser_run.add_argument('--upload-port', help="Manually specify the upload port/ip to use. "
|
||||
"For example /dev/cu.SLAB_USBtoUART.")
|
||||
parser_run.add_argument('--no-logs', help='Disable starting MQTT logs.',
|
||||
action='store_true')
|
||||
parser_run.add_argument('--topic', help='Manually set the topic to subscribe to for logs.')
|
||||
parser_run.add_argument('--username', help='Manually set the MQTT username for logs.')
|
||||
parser_run.add_argument('--password', help='Manually set the MQTT password for logs.')
|
||||
parser_run.add_argument('--client-id', help='Manually set the client id for logs.')
|
||||
|
||||
parser_clean = subparsers.add_parser('clean-mqtt', help="Helper to clear an MQTT topic from "
|
||||
"retain messages.")
|
||||
parser_clean.add_argument('--topic', help='Manually set the topic to subscribe to.')
|
||||
parser_clean.add_argument('--username', help='Manually set the username.')
|
||||
parser_clean.add_argument('--password', help='Manually set the password.')
|
||||
parser_clean.add_argument('--client-id', help='Manually set the client id.')
|
||||
|
||||
subparsers.add_parser('wizard', help="A helpful setup wizard that will guide "
|
||||
"you through setting up esphome.")
|
||||
|
||||
subparsers.add_parser('mqtt-fingerprint', help="Get the SSL fingerprint from a MQTT broker.")
|
||||
|
||||
subparsers.add_parser('version', help="Print the esphome version and exit.")
|
||||
|
||||
subparsers.add_parser('clean', help="Delete all temporary build files.")
|
||||
|
||||
dashboard = subparsers.add_parser('dashboard',
|
||||
help="Create a simple web server for a dashboard.")
|
||||
dashboard.add_argument("--port", help="The HTTP port to open connections on. Defaults to 6052.",
|
||||
type=int, default=6052)
|
||||
dashboard.add_argument("--username", help="The optional username to require "
|
||||
"for authentication.",
|
||||
type=str, default='')
|
||||
dashboard.add_argument("--password", help="The optional password to require "
|
||||
"for authentication.",
|
||||
type=str, default='')
|
||||
dashboard.add_argument("--open-ui", help="Open the dashboard UI in a browser.",
|
||||
action='store_true')
|
||||
dashboard.add_argument("--hassio",
|
||||
help=argparse.SUPPRESS,
|
||||
action="store_true")
|
||||
dashboard.add_argument("--socket",
|
||||
help="Make the dashboard serve under a unix socket", type=str)
|
||||
|
||||
vscode = subparsers.add_parser('vscode', help=argparse.SUPPRESS)
|
||||
vscode.add_argument('--ace', action='store_true')
|
||||
|
||||
subparsers.add_parser('update-all', help=argparse.SUPPRESS)
|
||||
|
||||
return parser.parse_args(argv[1:])
|
||||
|
||||
|
||||
def run_esphome(argv):
|
||||
args = parse_args(argv)
|
||||
CORE.dashboard = args.dashboard
|
||||
|
||||
setup_log(args.verbose, args.quiet)
|
||||
if args.command != 'version' and not args.configuration:
|
||||
_LOGGER.error("Missing configuration parameter, see esphome --help.")
|
||||
return 1
|
||||
|
||||
if sys.version_info < (3, 6, 0):
|
||||
_LOGGER.error("You're running ESPHome with Python <3.6. ESPHome is no longer compatible "
|
||||
"with this Python version. Please reinstall ESPHome with Python 3.6+")
|
||||
return 1
|
||||
|
||||
if args.command in PRE_CONFIG_ACTIONS:
|
||||
try:
|
||||
return PRE_CONFIG_ACTIONS[args.command](args)
|
||||
except EsphomeError as e:
|
||||
_LOGGER.error(e)
|
||||
return 1
|
||||
|
||||
for conf_path in args.configuration:
|
||||
CORE.config_path = conf_path
|
||||
CORE.dashboard = args.dashboard
|
||||
|
||||
config = read_config(dict(args.substitution) if args.substitution else {})
|
||||
if config is None:
|
||||
return 1
|
||||
CORE.config = config
|
||||
|
||||
if args.command not in POST_CONFIG_ACTIONS:
|
||||
safe_print(f"Unknown command {args.command}")
|
||||
|
||||
try:
|
||||
rc = POST_CONFIG_ACTIONS[args.command](args, config)
|
||||
except EsphomeError as e:
|
||||
_LOGGER.error(e)
|
||||
return 1
|
||||
if rc != 0:
|
||||
return rc
|
||||
|
||||
CORE.reset()
|
||||
return 0
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
return run_esphome(sys.argv)
|
||||
except EsphomeError as e:
|
||||
_LOGGER.error(e)
|
||||
return 1
|
||||
except KeyboardInterrupt:
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
2485
esphome/api/api_pb2.py
Normal file
File diff suppressed because one or more lines are too long
490
esphome/api/client.py
Normal file
@@ -0,0 +1,490 @@
|
||||
from datetime import datetime
|
||||
import functools
|
||||
import logging
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from typing import Optional # noqa
|
||||
from google.protobuf import message # noqa
|
||||
|
||||
from esphome import const
|
||||
import esphome.api.api_pb2 as pb
|
||||
from esphome.const import CONF_PASSWORD, CONF_PORT
|
||||
from esphome.core import EsphomeError
|
||||
from esphome.helpers import resolve_ip_address, indent, color
|
||||
from esphome.util import safe_print
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIConnectionError(EsphomeError):
|
||||
pass
|
||||
|
||||
|
||||
MESSAGE_TYPE_TO_PROTO = {
|
||||
1: pb.HelloRequest,
|
||||
2: pb.HelloResponse,
|
||||
3: pb.ConnectRequest,
|
||||
4: pb.ConnectResponse,
|
||||
5: pb.DisconnectRequest,
|
||||
6: pb.DisconnectResponse,
|
||||
7: pb.PingRequest,
|
||||
8: pb.PingResponse,
|
||||
9: pb.DeviceInfoRequest,
|
||||
10: pb.DeviceInfoResponse,
|
||||
11: pb.ListEntitiesRequest,
|
||||
12: pb.ListEntitiesBinarySensorResponse,
|
||||
13: pb.ListEntitiesCoverResponse,
|
||||
14: pb.ListEntitiesFanResponse,
|
||||
15: pb.ListEntitiesLightResponse,
|
||||
16: pb.ListEntitiesSensorResponse,
|
||||
17: pb.ListEntitiesSwitchResponse,
|
||||
18: pb.ListEntitiesTextSensorResponse,
|
||||
19: pb.ListEntitiesDoneResponse,
|
||||
20: pb.SubscribeStatesRequest,
|
||||
21: pb.BinarySensorStateResponse,
|
||||
22: pb.CoverStateResponse,
|
||||
23: pb.FanStateResponse,
|
||||
24: pb.LightStateResponse,
|
||||
25: pb.SensorStateResponse,
|
||||
26: pb.SwitchStateResponse,
|
||||
27: pb.TextSensorStateResponse,
|
||||
28: pb.SubscribeLogsRequest,
|
||||
29: pb.SubscribeLogsResponse,
|
||||
30: pb.CoverCommandRequest,
|
||||
31: pb.FanCommandRequest,
|
||||
32: pb.LightCommandRequest,
|
||||
33: pb.SwitchCommandRequest,
|
||||
34: pb.SubscribeServiceCallsRequest,
|
||||
35: pb.ServiceCallResponse,
|
||||
36: pb.GetTimeRequest,
|
||||
37: pb.GetTimeResponse,
|
||||
}
|
||||
|
||||
|
||||
def _varuint_to_bytes(value):
|
||||
if value <= 0x7F:
|
||||
return bytes([value])
|
||||
|
||||
ret = bytes()
|
||||
while value:
|
||||
temp = value & 0x7F
|
||||
value >>= 7
|
||||
if value:
|
||||
ret += bytes([temp | 0x80])
|
||||
else:
|
||||
ret += bytes([temp])
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _bytes_to_varuint(value):
|
||||
result = 0
|
||||
bitpos = 0
|
||||
for val in value:
|
||||
result |= (val & 0x7F) << bitpos
|
||||
bitpos += 7
|
||||
if (val & 0x80) == 0:
|
||||
return result
|
||||
return None
|
||||
|
||||
|
||||
# pylint: disable=too-many-instance-attributes,not-callable
|
||||
class APIClient(threading.Thread):
|
||||
def __init__(self, address, port, password):
|
||||
threading.Thread.__init__(self)
|
||||
self._address = address # type: str
|
||||
self._port = port # type: int
|
||||
self._password = password # type: Optional[str]
|
||||
self._socket = None # type: Optional[socket.socket]
|
||||
self._socket_open_event = threading.Event()
|
||||
self._socket_write_lock = threading.Lock()
|
||||
self._connected = False
|
||||
self._authenticated = False
|
||||
self._message_handlers = []
|
||||
self._keepalive = 5
|
||||
self._ping_timer = None
|
||||
|
||||
self.on_disconnect = None
|
||||
self.on_connect = None
|
||||
self.on_login = None
|
||||
self.auto_reconnect = False
|
||||
self._running_event = threading.Event()
|
||||
self._stop_event = threading.Event()
|
||||
|
||||
@property
|
||||
def stopped(self):
|
||||
return self._stop_event.is_set()
|
||||
|
||||
def _refresh_ping(self):
|
||||
if self._ping_timer is not None:
|
||||
self._ping_timer.cancel()
|
||||
self._ping_timer = None
|
||||
|
||||
def func():
|
||||
self._ping_timer = None
|
||||
|
||||
if self._connected:
|
||||
try:
|
||||
self.ping()
|
||||
except APIConnectionError as err:
|
||||
self._fatal_error(err)
|
||||
else:
|
||||
self._refresh_ping()
|
||||
|
||||
self._ping_timer = threading.Timer(self._keepalive, func)
|
||||
self._ping_timer.start()
|
||||
|
||||
def _cancel_ping(self):
|
||||
if self._ping_timer is not None:
|
||||
self._ping_timer.cancel()
|
||||
self._ping_timer = None
|
||||
|
||||
def _close_socket(self):
|
||||
self._cancel_ping()
|
||||
if self._socket is not None:
|
||||
self._socket.close()
|
||||
self._socket = None
|
||||
self._socket_open_event.clear()
|
||||
self._connected = False
|
||||
self._authenticated = False
|
||||
self._message_handlers = []
|
||||
|
||||
def stop(self, force=False):
|
||||
if self.stopped:
|
||||
raise ValueError
|
||||
|
||||
if self._connected and not force:
|
||||
try:
|
||||
self.disconnect()
|
||||
except APIConnectionError:
|
||||
pass
|
||||
self._close_socket()
|
||||
|
||||
self._stop_event.set()
|
||||
if not force:
|
||||
self.join()
|
||||
|
||||
def connect(self):
|
||||
if not self._running_event.wait(0.1):
|
||||
raise APIConnectionError("You need to call start() first!")
|
||||
|
||||
if self._connected:
|
||||
self.disconnect(on_disconnect=False)
|
||||
|
||||
try:
|
||||
ip = resolve_ip_address(self._address)
|
||||
except EsphomeError as err:
|
||||
_LOGGER.warning("Error resolving IP address of %s. Is it connected to WiFi?",
|
||||
self._address)
|
||||
_LOGGER.warning("(If this error persists, please set a static IP address: "
|
||||
"https://esphome.io/components/wifi.html#manual-ips)")
|
||||
raise APIConnectionError(err) from err
|
||||
|
||||
_LOGGER.info("Connecting to %s:%s (%s)", self._address, self._port, ip)
|
||||
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self._socket.settimeout(10.0)
|
||||
self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
self._socket.connect((ip, self._port))
|
||||
except OSError as err:
|
||||
err = APIConnectionError(f"Error connecting to {ip}: {err}")
|
||||
self._fatal_error(err)
|
||||
raise err
|
||||
self._socket.settimeout(0.1)
|
||||
|
||||
self._socket_open_event.set()
|
||||
|
||||
hello = pb.HelloRequest()
|
||||
hello.client_info = f'ESPHome v{const.__version__}'
|
||||
try:
|
||||
resp = self._send_message_await_response(hello, pb.HelloResponse)
|
||||
except APIConnectionError as err:
|
||||
self._fatal_error(err)
|
||||
raise err
|
||||
_LOGGER.debug("Successfully connected to %s ('%s' API=%s.%s)", self._address,
|
||||
resp.server_info, resp.api_version_major, resp.api_version_minor)
|
||||
self._connected = True
|
||||
self._refresh_ping()
|
||||
if self.on_connect is not None:
|
||||
self.on_connect()
|
||||
|
||||
def _check_connected(self):
|
||||
if not self._connected:
|
||||
err = APIConnectionError("Must be connected!")
|
||||
self._fatal_error(err)
|
||||
raise err
|
||||
|
||||
def login(self):
|
||||
self._check_connected()
|
||||
if self._authenticated:
|
||||
raise APIConnectionError("Already logged in!")
|
||||
|
||||
connect = pb.ConnectRequest()
|
||||
if self._password is not None:
|
||||
connect.password = self._password
|
||||
resp = self._send_message_await_response(connect, pb.ConnectResponse)
|
||||
if resp.invalid_password:
|
||||
raise APIConnectionError("Invalid password!")
|
||||
|
||||
self._authenticated = True
|
||||
if self.on_login is not None:
|
||||
self.on_login()
|
||||
|
||||
def _fatal_error(self, err):
|
||||
was_connected = self._connected
|
||||
|
||||
self._close_socket()
|
||||
|
||||
if was_connected and self.on_disconnect is not None:
|
||||
self.on_disconnect(err)
|
||||
|
||||
def _write(self, data): # type: (bytes) -> None
|
||||
if self._socket is None:
|
||||
raise APIConnectionError("Socket closed")
|
||||
|
||||
# _LOGGER.debug("Write: %s", format_bytes(data))
|
||||
with self._socket_write_lock:
|
||||
try:
|
||||
self._socket.sendall(data)
|
||||
except OSError as err:
|
||||
err = APIConnectionError(f"Error while writing data: {err}")
|
||||
self._fatal_error(err)
|
||||
raise err
|
||||
|
||||
def _send_message(self, msg):
|
||||
# type: (message.Message) -> None
|
||||
for message_type, klass in MESSAGE_TYPE_TO_PROTO.items():
|
||||
if isinstance(msg, klass):
|
||||
break
|
||||
else:
|
||||
raise ValueError
|
||||
|
||||
encoded = msg.SerializeToString()
|
||||
_LOGGER.debug("Sending %s:\n%s", type(msg), indent(str(msg)))
|
||||
req = bytes([0])
|
||||
req += _varuint_to_bytes(len(encoded))
|
||||
req += _varuint_to_bytes(message_type)
|
||||
req += encoded
|
||||
self._write(req)
|
||||
|
||||
def _send_message_await_response_complex(self, send_msg, do_append, do_stop, timeout=5):
|
||||
event = threading.Event()
|
||||
responses = []
|
||||
|
||||
def on_message(resp):
|
||||
if do_append(resp):
|
||||
responses.append(resp)
|
||||
if do_stop(resp):
|
||||
event.set()
|
||||
|
||||
self._message_handlers.append(on_message)
|
||||
self._send_message(send_msg)
|
||||
ret = event.wait(timeout)
|
||||
try:
|
||||
self._message_handlers.remove(on_message)
|
||||
except ValueError:
|
||||
pass
|
||||
if not ret:
|
||||
raise APIConnectionError("Timeout while waiting for message response!")
|
||||
return responses
|
||||
|
||||
def _send_message_await_response(self, send_msg, response_type, timeout=5):
|
||||
def is_response(msg):
|
||||
return isinstance(msg, response_type)
|
||||
|
||||
return self._send_message_await_response_complex(send_msg, is_response, is_response,
|
||||
timeout)[0]
|
||||
|
||||
def device_info(self):
|
||||
self._check_connected()
|
||||
return self._send_message_await_response(pb.DeviceInfoRequest(), pb.DeviceInfoResponse)
|
||||
|
||||
def ping(self):
|
||||
self._check_connected()
|
||||
return self._send_message_await_response(pb.PingRequest(), pb.PingResponse)
|
||||
|
||||
def disconnect(self, on_disconnect=True):
|
||||
self._check_connected()
|
||||
|
||||
try:
|
||||
self._send_message_await_response(pb.DisconnectRequest(), pb.DisconnectResponse)
|
||||
except APIConnectionError:
|
||||
pass
|
||||
self._close_socket()
|
||||
|
||||
if self.on_disconnect is not None and on_disconnect:
|
||||
self.on_disconnect(None)
|
||||
|
||||
def _check_authenticated(self):
|
||||
if not self._authenticated:
|
||||
raise APIConnectionError("Must login first!")
|
||||
|
||||
def subscribe_logs(self, on_log, log_level=7, dump_config=False):
|
||||
self._check_authenticated()
|
||||
|
||||
def on_msg(msg):
|
||||
if isinstance(msg, pb.SubscribeLogsResponse):
|
||||
on_log(msg)
|
||||
|
||||
self._message_handlers.append(on_msg)
|
||||
req = pb.SubscribeLogsRequest(dump_config=dump_config)
|
||||
req.level = log_level
|
||||
self._send_message(req)
|
||||
|
||||
def _recv(self, amount):
|
||||
ret = bytes()
|
||||
if amount == 0:
|
||||
return ret
|
||||
|
||||
while len(ret) < amount:
|
||||
if self.stopped:
|
||||
raise APIConnectionError("Stopped!")
|
||||
if not self._socket_open_event.is_set():
|
||||
raise APIConnectionError("No socket!")
|
||||
try:
|
||||
val = self._socket.recv(amount - len(ret))
|
||||
except AttributeError as err:
|
||||
raise APIConnectionError("Socket was closed") from err
|
||||
except socket.timeout:
|
||||
continue
|
||||
except OSError as err:
|
||||
raise APIConnectionError(f"Error while receiving data: {err}") from err
|
||||
ret += val
|
||||
return ret
|
||||
|
||||
def _recv_varint(self):
|
||||
raw = bytes()
|
||||
while not raw or raw[-1] & 0x80:
|
||||
raw += self._recv(1)
|
||||
return _bytes_to_varuint(raw)
|
||||
|
||||
def _run_once(self):
|
||||
if not self._socket_open_event.wait(0.1):
|
||||
return
|
||||
|
||||
# Preamble
|
||||
if self._recv(1)[0] != 0x00:
|
||||
raise APIConnectionError("Invalid preamble")
|
||||
|
||||
length = self._recv_varint()
|
||||
msg_type = self._recv_varint()
|
||||
|
||||
raw_msg = self._recv(length)
|
||||
if msg_type not in MESSAGE_TYPE_TO_PROTO:
|
||||
_LOGGER.debug("Skipping message type %s", msg_type)
|
||||
return
|
||||
|
||||
msg = MESSAGE_TYPE_TO_PROTO[msg_type]()
|
||||
msg.ParseFromString(raw_msg)
|
||||
_LOGGER.debug("Got message: %s:\n%s", type(msg), indent(str(msg)))
|
||||
for msg_handler in self._message_handlers[:]:
|
||||
msg_handler(msg)
|
||||
self._handle_internal_messages(msg)
|
||||
|
||||
def run(self):
|
||||
self._running_event.set()
|
||||
while not self.stopped:
|
||||
try:
|
||||
self._run_once()
|
||||
except APIConnectionError as err:
|
||||
if self.stopped:
|
||||
break
|
||||
if self._connected:
|
||||
_LOGGER.error("Error while reading incoming messages: %s", err)
|
||||
self._fatal_error(err)
|
||||
self._running_event.clear()
|
||||
|
||||
def _handle_internal_messages(self, msg):
|
||||
if isinstance(msg, pb.DisconnectRequest):
|
||||
self._send_message(pb.DisconnectResponse())
|
||||
if self._socket is not None:
|
||||
self._socket.close()
|
||||
self._socket = None
|
||||
self._connected = False
|
||||
if self.on_disconnect is not None:
|
||||
self.on_disconnect(None)
|
||||
elif isinstance(msg, pb.PingRequest):
|
||||
self._send_message(pb.PingResponse())
|
||||
elif isinstance(msg, pb.GetTimeRequest):
|
||||
resp = pb.GetTimeResponse()
|
||||
resp.epoch_seconds = int(time.time())
|
||||
self._send_message(resp)
|
||||
|
||||
|
||||
def run_logs(config, address):
|
||||
conf = config['api']
|
||||
port = conf[CONF_PORT]
|
||||
password = conf[CONF_PASSWORD]
|
||||
_LOGGER.info("Starting log output from %s using esphome API", address)
|
||||
|
||||
cli = APIClient(address, port, password)
|
||||
stopping = False
|
||||
retry_timer = []
|
||||
|
||||
has_connects = []
|
||||
|
||||
def try_connect(err, tries=0):
|
||||
if stopping:
|
||||
return
|
||||
|
||||
if err:
|
||||
_LOGGER.warning("Disconnected from API: %s", err)
|
||||
|
||||
while retry_timer:
|
||||
retry_timer.pop(0).cancel()
|
||||
|
||||
error = None
|
||||
try:
|
||||
cli.connect()
|
||||
cli.login()
|
||||
except APIConnectionError as err2: # noqa
|
||||
error = err2
|
||||
|
||||
if error is None:
|
||||
_LOGGER.info("Successfully connected to %s", address)
|
||||
return
|
||||
|
||||
wait_time = int(min(1.5**min(tries, 100), 30))
|
||||
if not has_connects:
|
||||
_LOGGER.warning("Initial connection failed. The ESP might not be connected "
|
||||
"to WiFi yet (%s). Re-Trying in %s seconds",
|
||||
error, wait_time)
|
||||
else:
|
||||
_LOGGER.warning("Couldn't connect to API (%s). Trying to reconnect in %s seconds",
|
||||
error, wait_time)
|
||||
timer = threading.Timer(wait_time, functools.partial(try_connect, None, tries + 1))
|
||||
timer.start()
|
||||
retry_timer.append(timer)
|
||||
|
||||
def on_log(msg):
|
||||
time_ = datetime.now().time().strftime('[%H:%M:%S]')
|
||||
text = msg.message
|
||||
if msg.send_failed:
|
||||
text = color('white', '(Message skipped because it was too big to fit in '
|
||||
'TCP buffer - This is only cosmetic)')
|
||||
safe_print(time_ + text)
|
||||
|
||||
def on_login():
|
||||
try:
|
||||
cli.subscribe_logs(on_log, dump_config=not has_connects)
|
||||
has_connects.append(True)
|
||||
except APIConnectionError:
|
||||
cli.disconnect()
|
||||
|
||||
cli.on_disconnect = try_connect
|
||||
cli.on_login = on_login
|
||||
cli.start()
|
||||
|
||||
try:
|
||||
try_connect(None)
|
||||
while True:
|
||||
time.sleep(1)
|
||||
except KeyboardInterrupt:
|
||||
stopping = True
|
||||
cli.stop(True)
|
||||
while retry_timer:
|
||||
retry_timer.pop(0).cancel()
|
||||
return 0
|
271
esphome/automation.py
Normal file
@@ -0,0 +1,271 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_AUTOMATION_ID, CONF_CONDITION, CONF_ELSE, CONF_ID, CONF_THEN, \
|
||||
CONF_TRIGGER_ID, CONF_TYPE_ID, CONF_TIME
|
||||
from esphome.core import coroutine
|
||||
from esphome.util import Registry
|
||||
|
||||
|
||||
def maybe_simple_id(*validators):
|
||||
return maybe_conf(CONF_ID, *validators)
|
||||
|
||||
|
||||
def maybe_conf(conf, *validators):
|
||||
validator = cv.All(*validators)
|
||||
|
||||
def validate(value):
|
||||
if isinstance(value, dict):
|
||||
return validator(value)
|
||||
with cv.remove_prepend_path([conf]):
|
||||
return validator({conf: value})
|
||||
|
||||
return validate
|
||||
|
||||
|
||||
def register_action(name, action_type, schema):
|
||||
return ACTION_REGISTRY.register(name, action_type, schema)
|
||||
|
||||
|
||||
def register_condition(name, condition_type, schema):
|
||||
return CONDITION_REGISTRY.register(name, condition_type, schema)
|
||||
|
||||
|
||||
Action = cg.esphome_ns.class_('Action')
|
||||
Trigger = cg.esphome_ns.class_('Trigger')
|
||||
ACTION_REGISTRY = Registry()
|
||||
Condition = cg.esphome_ns.class_('Condition')
|
||||
CONDITION_REGISTRY = Registry()
|
||||
validate_action = cv.validate_registry_entry('action', ACTION_REGISTRY)
|
||||
validate_action_list = cv.validate_registry('action', ACTION_REGISTRY)
|
||||
validate_condition = cv.validate_registry_entry('condition', CONDITION_REGISTRY)
|
||||
validate_condition_list = cv.validate_registry('condition', CONDITION_REGISTRY)
|
||||
|
||||
|
||||
def validate_potentially_and_condition(value):
|
||||
if isinstance(value, list):
|
||||
with cv.remove_prepend_path(['and']):
|
||||
return validate_condition({
|
||||
'and': value
|
||||
})
|
||||
return validate_condition(value)
|
||||
|
||||
|
||||
DelayAction = cg.esphome_ns.class_('DelayAction', Action, cg.Component)
|
||||
LambdaAction = cg.esphome_ns.class_('LambdaAction', Action)
|
||||
IfAction = cg.esphome_ns.class_('IfAction', Action)
|
||||
WhileAction = cg.esphome_ns.class_('WhileAction', Action)
|
||||
WaitUntilAction = cg.esphome_ns.class_('WaitUntilAction', Action, cg.Component)
|
||||
UpdateComponentAction = cg.esphome_ns.class_('UpdateComponentAction', Action)
|
||||
Automation = cg.esphome_ns.class_('Automation')
|
||||
|
||||
LambdaCondition = cg.esphome_ns.class_('LambdaCondition', Condition)
|
||||
ForCondition = cg.esphome_ns.class_('ForCondition', Condition, cg.Component)
|
||||
|
||||
|
||||
def validate_automation(extra_schema=None, extra_validators=None, single=False):
|
||||
if extra_schema is None:
|
||||
extra_schema = {}
|
||||
if isinstance(extra_schema, cv.Schema):
|
||||
extra_schema = extra_schema.schema
|
||||
schema = AUTOMATION_SCHEMA.extend(extra_schema)
|
||||
|
||||
def validator_(value):
|
||||
if isinstance(value, list):
|
||||
# List of items, there are two possible options here, either a sequence of
|
||||
# actions (no then:) or a list of automations.
|
||||
try:
|
||||
# First try as a sequence of actions
|
||||
# If that succeeds, return immediately
|
||||
with cv.remove_prepend_path([CONF_THEN]):
|
||||
return [schema({CONF_THEN: value})]
|
||||
except cv.Invalid as err:
|
||||
# Next try as a sequence of automations
|
||||
try:
|
||||
return cv.Schema([schema])(value)
|
||||
except cv.Invalid as err2:
|
||||
if 'extra keys not allowed' in str(err2) and len(err2.path) == 2:
|
||||
# pylint: disable=raise-missing-from
|
||||
raise err
|
||||
if 'Unable to find action' in str(err):
|
||||
raise err2
|
||||
raise cv.MultipleInvalid([err, err2])
|
||||
elif isinstance(value, dict):
|
||||
if CONF_THEN in value:
|
||||
return [schema(value)]
|
||||
with cv.remove_prepend_path([CONF_THEN]):
|
||||
return [schema({CONF_THEN: value})]
|
||||
# This should only happen with invalid configs, but let's have a nice error message.
|
||||
return [schema(value)]
|
||||
|
||||
def validator(value):
|
||||
value = validator_(value)
|
||||
if extra_validators is not None:
|
||||
value = cv.Schema([extra_validators])(value)
|
||||
if single:
|
||||
if len(value) != 1:
|
||||
raise cv.Invalid("Cannot have more than 1 automation for templates")
|
||||
return value[0]
|
||||
return value
|
||||
|
||||
return validator
|
||||
|
||||
|
||||
AUTOMATION_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(Trigger),
|
||||
cv.GenerateID(CONF_AUTOMATION_ID): cv.declare_id(Automation),
|
||||
cv.Required(CONF_THEN): validate_action_list,
|
||||
})
|
||||
|
||||
AndCondition = cg.esphome_ns.class_('AndCondition', Condition)
|
||||
OrCondition = cg.esphome_ns.class_('OrCondition', Condition)
|
||||
NotCondition = cg.esphome_ns.class_('NotCondition', Condition)
|
||||
|
||||
|
||||
@register_condition('and', AndCondition, validate_condition_list)
|
||||
def and_condition_to_code(config, condition_id, template_arg, args):
|
||||
conditions = yield build_condition_list(config, template_arg, args)
|
||||
yield cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition('or', OrCondition, validate_condition_list)
|
||||
def or_condition_to_code(config, condition_id, template_arg, args):
|
||||
conditions = yield build_condition_list(config, template_arg, args)
|
||||
yield cg.new_Pvariable(condition_id, template_arg, conditions)
|
||||
|
||||
|
||||
@register_condition('not', NotCondition, validate_potentially_and_condition)
|
||||
def not_condition_to_code(config, condition_id, template_arg, args):
|
||||
condition = yield build_condition(config, template_arg, args)
|
||||
yield cg.new_Pvariable(condition_id, template_arg, condition)
|
||||
|
||||
|
||||
@register_condition('lambda', LambdaCondition, cv.lambda_)
|
||||
def lambda_condition_to_code(config, condition_id, template_arg, args):
|
||||
lambda_ = yield cg.process_lambda(config, args, return_type=bool)
|
||||
yield cg.new_Pvariable(condition_id, template_arg, lambda_)
|
||||
|
||||
|
||||
@register_condition('for', ForCondition, cv.Schema({
|
||||
cv.Required(CONF_TIME): cv.templatable(cv.positive_time_period_milliseconds),
|
||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
||||
}).extend(cv.COMPONENT_SCHEMA))
|
||||
def for_condition_to_code(config, condition_id, template_arg, args):
|
||||
condition = yield build_condition(config[CONF_CONDITION], cg.TemplateArguments(), [])
|
||||
var = cg.new_Pvariable(condition_id, template_arg, condition)
|
||||
yield cg.register_component(var, config)
|
||||
templ = yield cg.templatable(config[CONF_TIME], args, cg.uint32)
|
||||
cg.add(var.set_time(templ))
|
||||
yield var
|
||||
|
||||
|
||||
@register_action('delay', DelayAction, cv.templatable(cv.positive_time_period_milliseconds))
|
||||
def delay_action_to_code(config, action_id, template_arg, args):
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
yield cg.register_component(var, {})
|
||||
template_ = yield cg.templatable(config, args, cg.uint32)
|
||||
cg.add(var.set_delay(template_))
|
||||
yield var
|
||||
|
||||
|
||||
@register_action('if', IfAction, cv.All({
|
||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
||||
cv.Optional(CONF_THEN): validate_action_list,
|
||||
cv.Optional(CONF_ELSE): validate_action_list,
|
||||
}, cv.has_at_least_one_key(CONF_THEN, CONF_ELSE)))
|
||||
def if_action_to_code(config, action_id, template_arg, args):
|
||||
conditions = yield build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
||||
if CONF_THEN in config:
|
||||
actions = yield build_action_list(config[CONF_THEN], template_arg, args)
|
||||
cg.add(var.add_then(actions))
|
||||
if CONF_ELSE in config:
|
||||
actions = yield build_action_list(config[CONF_ELSE], template_arg, args)
|
||||
cg.add(var.add_else(actions))
|
||||
yield var
|
||||
|
||||
|
||||
@register_action('while', WhileAction, cv.Schema({
|
||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
||||
cv.Required(CONF_THEN): validate_action_list,
|
||||
}))
|
||||
def while_action_to_code(config, action_id, template_arg, args):
|
||||
conditions = yield build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
||||
actions = yield build_action_list(config[CONF_THEN], template_arg, args)
|
||||
cg.add(var.add_then(actions))
|
||||
yield var
|
||||
|
||||
|
||||
def validate_wait_until(value):
|
||||
schema = cv.Schema({
|
||||
cv.Required(CONF_CONDITION): validate_potentially_and_condition,
|
||||
})
|
||||
if isinstance(value, dict) and CONF_CONDITION in value:
|
||||
return schema(value)
|
||||
return validate_wait_until({CONF_CONDITION: value})
|
||||
|
||||
|
||||
@register_action('wait_until', WaitUntilAction, validate_wait_until)
|
||||
def wait_until_action_to_code(config, action_id, template_arg, args):
|
||||
conditions = yield build_condition(config[CONF_CONDITION], template_arg, args)
|
||||
var = cg.new_Pvariable(action_id, template_arg, conditions)
|
||||
yield cg.register_component(var, {})
|
||||
yield var
|
||||
|
||||
|
||||
@register_action('lambda', LambdaAction, cv.lambda_)
|
||||
def lambda_action_to_code(config, action_id, template_arg, args):
|
||||
lambda_ = yield cg.process_lambda(config, args, return_type=cg.void)
|
||||
yield cg.new_Pvariable(action_id, template_arg, lambda_)
|
||||
|
||||
|
||||
@register_action('component.update', UpdateComponentAction, maybe_simple_id({
|
||||
cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
|
||||
}))
|
||||
def component_update_action_to_code(config, action_id, template_arg, args):
|
||||
comp = yield cg.get_variable(config[CONF_ID])
|
||||
yield cg.new_Pvariable(action_id, template_arg, comp)
|
||||
|
||||
|
||||
@coroutine
|
||||
def build_action(full_config, template_arg, args):
|
||||
registry_entry, config = cg.extract_registry_entry_config(ACTION_REGISTRY, full_config)
|
||||
action_id = full_config[CONF_TYPE_ID]
|
||||
builder = registry_entry.coroutine_fun
|
||||
yield builder(config, action_id, template_arg, args)
|
||||
|
||||
|
||||
@coroutine
|
||||
def build_action_list(config, templ, arg_type):
|
||||
actions = []
|
||||
for conf in config:
|
||||
action = yield build_action(conf, templ, arg_type)
|
||||
actions.append(action)
|
||||
yield actions
|
||||
|
||||
|
||||
@coroutine
|
||||
def build_condition(full_config, template_arg, args):
|
||||
registry_entry, config = cg.extract_registry_entry_config(CONDITION_REGISTRY, full_config)
|
||||
action_id = full_config[CONF_TYPE_ID]
|
||||
builder = registry_entry.coroutine_fun
|
||||
yield builder(config, action_id, template_arg, args)
|
||||
|
||||
|
||||
@coroutine
|
||||
def build_condition_list(config, templ, args):
|
||||
conditions = []
|
||||
for conf in config:
|
||||
condition = yield build_condition(conf, templ, args)
|
||||
conditions.append(condition)
|
||||
yield conditions
|
||||
|
||||
|
||||
@coroutine
|
||||
def build_automation(trigger, args, config):
|
||||
arg_types = [arg[0] for arg in args]
|
||||
templ = cg.TemplateArguments(*arg_types)
|
||||
obj = cg.new_Pvariable(config[CONF_AUTOMATION_ID], templ, trigger)
|
||||
actions = yield build_action_list(config[CONF_THEN], templ, args)
|
||||
cg.add(obj.add_actions(actions))
|
||||
yield obj
|
26  esphome/codegen.py  Normal file
@@ -0,0 +1,26 @@
# Base file for all codegen-related imports
# All integrations should have a line in the import section like this
#
# >>> import esphome.codegen as cg
#
# Integrations should specifically *NOT* import directly from the
# other helper modules (cpp_generator etc) directly if they don't
# want to break suddenly due to a rename (this file will get backports for features).

# pylint: disable=unused-import
from esphome.cpp_generator import (  # noqa
    Expression, RawExpression, RawStatement, TemplateArguments,
    StructInitializer, ArrayInitializer, safe_exp, Statement, LineComment,
    progmem_array, statement, variable, Pvariable, new_Pvariable,
    add, add_global, add_library, add_build_flag, add_define,
    get_variable, get_variable_with_full_id, process_lambda, is_template, templatable, MockObj,
    MockObjClass)
from esphome.cpp_helpers import (  # noqa
    gpio_pin_expression, register_component, build_registry_entry,
    build_registry_list, extract_registry_entry_config, register_parented)
from esphome.cpp_types import (  # noqa
    global_ns, void, nullptr, float_, double, bool_, int_, std_ns, std_string,
    std_vector, uint8, uint16, uint32, int32, const_char_ptr, NAN,
    esphome_ns, App, Nameable, Component, ComponentPtr,
    PollingComponent, Application, optional, arduino_json_ns, JsonObject,
    JsonObjectRef, JsonObjectConstRef, Controller, GPIOPin)
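The comments above spell out the import convention for integrations; below is a minimal sketch of a hypothetical integration module that follows it (the names my_component, my_ns and MyComponent are illustrative, not part of ESPHome — compare the real a4988 stepper.py later in this diff):

    import esphome.codegen as cg
    import esphome.config_validation as cv
    from esphome.const import CONF_ID

    # Declare the C++ namespace and class that the generated code will reference.
    my_ns = cg.esphome_ns.namespace('my_component')
    MyComponent = my_ns.class_('MyComponent', cg.Component)

    CONFIG_SCHEMA = cv.Schema({
        cv.GenerateID(): cv.declare_id(MyComponent),
    }).extend(cv.COMPONENT_SCHEMA)


    def to_code(config):
        # Create the C++ object and register it with the application loop.
        var = cg.new_Pvariable(config[CONF_ID])
        yield cg.register_component(var, config)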
0
esphome/components/a4988/__init__.py
Normal file
49
esphome/components/a4988/a4988.cpp
Normal file
@@ -0,0 +1,49 @@
|
||||
#include "a4988.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace a4988 {
|
||||
|
||||
static const char *TAG = "a4988.stepper";
|
||||
|
||||
void A4988::setup() {
|
||||
ESP_LOGCONFIG(TAG, "Setting up A4988...");
|
||||
if (this->sleep_pin_ != nullptr) {
|
||||
this->sleep_pin_->setup();
|
||||
this->sleep_pin_->digital_write(false);
|
||||
}
|
||||
this->step_pin_->setup();
|
||||
this->step_pin_->digital_write(false);
|
||||
this->dir_pin_->setup();
|
||||
this->dir_pin_->digital_write(false);
|
||||
}
|
||||
void A4988::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "A4988:");
|
||||
LOG_PIN(" Step Pin: ", this->step_pin_);
|
||||
LOG_PIN(" Dir Pin: ", this->dir_pin_);
|
||||
LOG_PIN(" Sleep Pin: ", this->sleep_pin_);
|
||||
LOG_STEPPER(this);
|
||||
}
|
||||
void A4988::loop() {
|
||||
bool at_target = this->has_reached_target();
|
||||
if (this->sleep_pin_ != nullptr) {
|
||||
this->sleep_pin_->digital_write(!at_target);
|
||||
}
|
||||
if (at_target) {
|
||||
this->high_freq_.stop();
|
||||
} else {
|
||||
this->high_freq_.start();
|
||||
}
|
||||
|
||||
int32_t dir = this->should_step_();
|
||||
if (dir == 0)
|
||||
return;
|
||||
|
||||
this->dir_pin_->digital_write(dir == 1);
|
||||
this->step_pin_->digital_write(true);
|
||||
delayMicroseconds(5);
|
||||
this->step_pin_->digital_write(false);
|
||||
}
|
||||
|
||||
} // namespace a4988
|
||||
} // namespace esphome
|
28
esphome/components/a4988/a4988.h
Normal file
@@ -0,0 +1,28 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/esphal.h"
|
||||
#include "esphome/components/stepper/stepper.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace a4988 {
|
||||
|
||||
class A4988 : public stepper::Stepper, public Component {
|
||||
public:
|
||||
void set_step_pin(GPIOPin *step_pin) { step_pin_ = step_pin; }
|
||||
void set_dir_pin(GPIOPin *dir_pin) { dir_pin_ = dir_pin; }
|
||||
void set_sleep_pin(GPIOPin *sleep_pin) { this->sleep_pin_ = sleep_pin; }
|
||||
void setup() override;
|
||||
void dump_config() override;
|
||||
void loop() override;
|
||||
float get_setup_priority() const override { return setup_priority::HARDWARE; }
|
||||
|
||||
protected:
|
||||
GPIOPin *step_pin_;
|
||||
GPIOPin *dir_pin_;
|
||||
GPIOPin *sleep_pin_{nullptr};
|
||||
HighFrequencyLoopRequester high_freq_;
|
||||
};
|
||||
|
||||
} // namespace a4988
|
||||
} // namespace esphome
|
31
esphome/components/a4988/stepper.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from esphome import pins
|
||||
from esphome.components import stepper
|
||||
import esphome.config_validation as cv
|
||||
import esphome.codegen as cg
|
||||
from esphome.const import CONF_DIR_PIN, CONF_ID, CONF_SLEEP_PIN, CONF_STEP_PIN
|
||||
|
||||
|
||||
a4988_ns = cg.esphome_ns.namespace('a4988')
|
||||
A4988 = a4988_ns.class_('A4988', stepper.Stepper, cg.Component)
|
||||
|
||||
CONFIG_SCHEMA = stepper.STEPPER_SCHEMA.extend({
|
||||
cv.Required(CONF_ID): cv.declare_id(A4988),
|
||||
cv.Required(CONF_STEP_PIN): pins.gpio_output_pin_schema,
|
||||
cv.Required(CONF_DIR_PIN): pins.gpio_output_pin_schema,
|
||||
cv.Optional(CONF_SLEEP_PIN): pins.gpio_output_pin_schema,
|
||||
}).extend(cv.COMPONENT_SCHEMA)
|
||||
|
||||
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
yield cg.register_component(var, config)
|
||||
yield stepper.register_stepper(var, config)
|
||||
|
||||
step_pin = yield cg.gpio_pin_expression(config[CONF_STEP_PIN])
|
||||
cg.add(var.set_step_pin(step_pin))
|
||||
dir_pin = yield cg.gpio_pin_expression(config[CONF_DIR_PIN])
|
||||
cg.add(var.set_dir_pin(dir_pin))
|
||||
|
||||
if CONF_SLEEP_PIN in config:
|
||||
sleep_pin = yield cg.gpio_pin_expression(config[CONF_SLEEP_PIN])
|
||||
cg.add(var.set_sleep_pin(sleep_pin))
|
0
esphome/components/ac_dimmer/__init__.py
Normal file
217
esphome/components/ac_dimmer/ac_dimmer.cpp
Normal file
@@ -0,0 +1,217 @@
|
||||
#include "ac_dimmer.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP8266
|
||||
#include <core_esp8266_waveform.h>
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace ac_dimmer {
|
||||
|
||||
static const char *TAG = "ac_dimmer";
|
||||
|
||||
// Global array to store dimmer objects
|
||||
static AcDimmerDataStore *all_dimmers[32];
|
||||
|
||||
/// Time in microseconds the gate should be held high
|
||||
/// 10µs should be long enough for most triacs
|
||||
/// For reference: BT136 datasheet says 2µs nominal (page 7)
|
||||
static uint32_t GATE_ENABLE_TIME = 10;
|
||||
|
||||
/// Function called from timer interrupt
|
||||
/// Input is current time in microseconds (micros())
|
||||
/// Returns when next "event" is expected in µs, or 0 if no such event known.
|
||||
uint32_t ICACHE_RAM_ATTR HOT AcDimmerDataStore::timer_intr(uint32_t now) {
|
||||
// If no ZC signal received yet.
|
||||
if (this->crossed_zero_at == 0)
|
||||
return 0;
|
||||
|
||||
uint32_t time_since_zc = now - this->crossed_zero_at;
|
||||
if (this->value == 65535 || this->value == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (this->enable_time_us != 0 && time_since_zc >= this->enable_time_us) {
|
||||
this->enable_time_us = 0;
|
||||
this->gate_pin->digital_write(true);
|
||||
// Prevent too short pulses
|
||||
this->disable_time_us = max(this->disable_time_us, time_since_zc + GATE_ENABLE_TIME);
|
||||
}
|
||||
if (this->disable_time_us != 0 && time_since_zc >= this->disable_time_us) {
|
||||
this->disable_time_us = 0;
|
||||
this->gate_pin->digital_write(false);
|
||||
}
|
||||
|
||||
if (time_since_zc < this->enable_time_us)
|
||||
// Next event is enable, return time until that event
|
||||
return this->enable_time_us - time_since_zc;
|
||||
else if (time_since_zc < disable_time_us) {
|
||||
// Next event is disable, return time until that event
|
||||
return this->disable_time_us - time_since_zc;
|
||||
}
|
||||
|
||||
if (time_since_zc >= this->cycle_time_us) {
|
||||
// Already past last cycle time, schedule next call shortly
|
||||
return 100;
|
||||
}
|
||||
|
||||
return this->cycle_time_us - time_since_zc;
|
||||
}
|
||||
|
||||
/// Run timer interrupt code and return in how many µs the next event is expected
|
||||
uint32_t ICACHE_RAM_ATTR HOT timer_interrupt() {
|
||||
// run at least with 1kHz
|
||||
uint32_t min_dt_us = 1000;
|
||||
uint32_t now = micros();
|
||||
for (auto *dimmer : all_dimmers) {
|
||||
if (dimmer == nullptr)
|
||||
// no more dimmers
|
||||
break;
|
||||
uint32_t res = dimmer->timer_intr(now);
|
||||
if (res != 0 && res < min_dt_us)
|
||||
min_dt_us = res;
|
||||
}
|
||||
// return time until next timer1 interrupt in µs
|
||||
return min_dt_us;
|
||||
}
|
||||
|
||||
/// GPIO interrupt routine, called when ZC pin triggers
|
||||
void ICACHE_RAM_ATTR HOT AcDimmerDataStore::gpio_intr() {
|
||||
uint32_t prev_crossed = this->crossed_zero_at;
|
||||
|
||||
// 50Hz mains frequency should give a half cycle of 10ms; 60Hz will give 8.33ms.
// In any case the cycle lasts at least 5ms.
|
||||
this->crossed_zero_at = micros();
|
||||
uint32_t cycle_time = this->crossed_zero_at - prev_crossed;
|
||||
if (cycle_time > 5000) {
|
||||
this->cycle_time_us = cycle_time;
|
||||
} else {
|
||||
// Otherwise this is noise and this is the 2nd (or 3rd...) falling edge in the same pulse.
// Consider this the right falling edge and accumulate the cycle time instead.
|
||||
this->cycle_time_us += cycle_time;
|
||||
}
|
||||
|
||||
if (this->value == 65535) {
|
||||
// fully on, enable output immediately
|
||||
this->gate_pin->digital_write(true);
|
||||
} else if (this->init_cycle) {
|
||||
// send a full cycle
|
||||
this->init_cycle = false;
|
||||
this->enable_time_us = 0;
|
||||
this->disable_time_us = cycle_time_us;
|
||||
} else if (this->value == 0) {
|
||||
// fully off, disable output immediately
|
||||
this->gate_pin->digital_write(false);
|
||||
} else {
|
||||
if (this->method == DIM_METHOD_TRAILING) {
|
||||
this->enable_time_us = 1; // cannot be 0
|
||||
this->disable_time_us = max((uint32_t) 10, this->value * this->cycle_time_us / 65535);
|
||||
} else {
|
||||
// calculate time until enable in µs: (1.0-value)*cycle_time, but with integer arithmetic
|
||||
// also take into account min_power
|
||||
auto min_us = this->cycle_time_us * this->min_power / 1000;
|
||||
this->enable_time_us = max((uint32_t) 1, ((65535 - this->value) * (this->cycle_time_us - min_us)) / 65535);
|
||||
if (this->method == DIM_METHOD_LEADING_PULSE) {
|
||||
// Minimum pulse time should be enough for the triac to trigger when it is close to the ZC zone
|
||||
// this is for brightness near 99%
|
||||
this->disable_time_us = max(this->enable_time_us + GATE_ENABLE_TIME, (uint32_t) cycle_time_us / 10);
|
||||
} else {
|
||||
this->gate_pin->digital_write(false);
|
||||
this->disable_time_us = this->cycle_time_us;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void ICACHE_RAM_ATTR HOT AcDimmerDataStore::s_gpio_intr(AcDimmerDataStore *store) {
|
||||
// Attaching pin interrupts on the same pin will override the previous interrupt
|
||||
// However, the user expects that multiple dimmers sharing the same ZC pin will work.
|
||||
// We solve this in a bit of a hacky way: On each pin interrupt, we check all dimmers
|
||||
// if any of them are using the same ZC pin, and also trigger the interrupt for *them*.
|
||||
for (auto *dimmer : all_dimmers) {
|
||||
if (dimmer == nullptr)
|
||||
break;
|
||||
if (dimmer->zero_cross_pin_number == store->zero_cross_pin_number) {
|
||||
dimmer->gpio_intr();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
// ESP32 implementation, uses basically the same code but needs to wrap
|
||||
// timer_interrupt() function to auto-reschedule
|
||||
static hw_timer_t *dimmer_timer = nullptr;
|
||||
void ICACHE_RAM_ATTR HOT AcDimmerDataStore::s_timer_intr() { timer_interrupt(); }
|
||||
#endif
|
||||
|
||||
void AcDimmer::setup() {
|
||||
// extend all_dimmers array with our dimmer
|
||||
|
||||
// Need to be sure the zero cross pin is setup only once, ESP8266 fails and ESP32 seems to fail silently
|
||||
auto setup_zero_cross_pin = true;
|
||||
|
||||
for (auto &all_dimmer : all_dimmers) {
|
||||
if (all_dimmer == nullptr) {
|
||||
all_dimmer = &this->store_;
|
||||
break;
|
||||
}
|
||||
if (all_dimmer->zero_cross_pin_number == this->zero_cross_pin_->get_pin()) {
|
||||
setup_zero_cross_pin = false;
|
||||
}
|
||||
}
|
||||
|
||||
this->gate_pin_->setup();
|
||||
this->store_.gate_pin = this->gate_pin_->to_isr();
|
||||
this->store_.zero_cross_pin_number = this->zero_cross_pin_->get_pin();
|
||||
this->store_.min_power = static_cast<uint16_t>(this->min_power_ * 1000);
|
||||
this->min_power_ = 0;
|
||||
this->store_.method = this->method_;
|
||||
|
||||
if (setup_zero_cross_pin) {
|
||||
this->zero_cross_pin_->setup();
|
||||
this->store_.zero_cross_pin = this->zero_cross_pin_->to_isr();
|
||||
this->zero_cross_pin_->attach_interrupt(&AcDimmerDataStore::s_gpio_intr, &this->store_, FALLING);
|
||||
}
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP8266
|
||||
// Uses ESP8266 waveform (soft PWM) class
|
||||
// PWM and AcDimmer can even run at the same time this way
|
||||
setTimer1Callback(&timer_interrupt);
|
||||
#endif
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
// 80 Divider -> 1 count=1µs
|
||||
dimmer_timer = timerBegin(0, 80, true);
|
||||
timerAttachInterrupt(dimmer_timer, &AcDimmerDataStore::s_timer_intr, true);
|
||||
// For ESP32, we can't use dynamic interval calculation because the timerX functions
|
||||
// are not callable from ISR (placed in flash storage).
|
||||
// Here we just use an interrupt firing every 50 µs.
|
||||
timerAlarmWrite(dimmer_timer, 50, true);
|
||||
timerAlarmEnable(dimmer_timer);
|
||||
#endif
|
||||
}
|
||||
void AcDimmer::write_state(float state) {
|
||||
auto new_value = static_cast<uint16_t>(roundf(state * 65535));
|
||||
if (new_value != 0 && this->store_.value == 0)
|
||||
this->store_.init_cycle = this->init_with_half_cycle_;
|
||||
this->store_.value = new_value;
|
||||
}
|
||||
void AcDimmer::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "AcDimmer:");
|
||||
LOG_PIN(" Output Pin: ", this->gate_pin_);
|
||||
LOG_PIN(" Zero-Cross Pin: ", this->zero_cross_pin_);
|
||||
ESP_LOGCONFIG(TAG, " Min Power: %.1f%%", this->store_.min_power / 10.0f);
|
||||
ESP_LOGCONFIG(TAG, " Init with half cycle: %s", YESNO(this->init_with_half_cycle_));
|
||||
if (method_ == DIM_METHOD_LEADING_PULSE)
|
||||
ESP_LOGCONFIG(TAG, " Method: leading pulse");
|
||||
else if (method_ == DIM_METHOD_LEADING)
|
||||
ESP_LOGCONFIG(TAG, " Method: leading");
|
||||
else
|
||||
ESP_LOGCONFIG(TAG, " Method: trailing");
|
||||
|
||||
LOG_FLOAT_OUTPUT(this);
|
||||
ESP_LOGV(TAG, " Estimated Frequency: %.3fHz", 1e6f / this->store_.cycle_time_us / 2);
|
||||
}
|
||||
|
||||
} // namespace ac_dimmer
|
||||
} // namespace esphome
|
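For reference, the firing delay that AcDimmerDataStore::gpio_intr() computes for the two leading-edge methods can be reproduced with the short Python sketch below; the helper name and the example numbers are illustrative only and are not part of the component.

def ac_dimmer_enable_time_us(value, cycle_time_us, min_power_milli):
    # Mirrors the integer arithmetic in gpio_intr(): min_power is stored as
    # min_power_ * 1000, and the delay shrinks linearly as value approaches 65535.
    min_us = cycle_time_us * min_power_milli // 1000
    return max(1, (65535 - value) * (cycle_time_us - min_us) // 65535)

# 50 Hz mains -> 10 ms half cycle, 50% brightness, default min_power of 10%:
print(ac_dimmer_enable_time_us(32768, 10000, 100))  # ~4499 us after the zero cross

So at half brightness the triac gate is driven roughly 4.5 ms into each 10 ms half cycle.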
66
esphome/components/ac_dimmer/ac_dimmer.h
Normal file
@@ -0,0 +1,66 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/esphal.h"
|
||||
#include "esphome/components/output/float_output.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace ac_dimmer {
|
||||
|
||||
enum DimMethod { DIM_METHOD_LEADING_PULSE = 0, DIM_METHOD_LEADING, DIM_METHOD_TRAILING };
|
||||
|
||||
struct AcDimmerDataStore {
|
||||
/// Zero-cross pin
|
||||
ISRInternalGPIOPin *zero_cross_pin;
|
||||
/// Zero-cross pin number - used to share ZC pin across multiple dimmers
|
||||
uint8_t zero_cross_pin_number;
|
||||
/// Output pin to write to
|
||||
ISRInternalGPIOPin *gate_pin;
|
||||
/// Value of the dimmer - 0 to 65535.
|
||||
uint16_t value;
|
||||
/// Minimum power for activation
|
||||
uint16_t min_power;
|
||||
/// Time between the last two ZC pulses
|
||||
uint32_t cycle_time_us;
|
||||
/// Time (in micros()) of last ZC signal
|
||||
uint32_t crossed_zero_at;
|
||||
/// Time since last ZC pulse to enable gate pin. 0 means not set.
|
||||
uint32_t enable_time_us;
|
||||
/// Time since last ZC pulse to disable gate pin. 0 means no disable.
|
||||
uint32_t disable_time_us;
|
||||
/// Set to send the first half ac cycle complete
|
||||
bool init_cycle;
|
||||
/// Dimmer method
|
||||
DimMethod method;
|
||||
|
||||
uint32_t timer_intr(uint32_t now);
|
||||
|
||||
void gpio_intr();
|
||||
static void s_gpio_intr(AcDimmerDataStore *store);
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
static void s_timer_intr();
|
||||
#endif
|
||||
};
|
||||
|
||||
class AcDimmer : public output::FloatOutput, public Component {
|
||||
public:
|
||||
void setup() override;
|
||||
|
||||
void dump_config() override;
|
||||
void set_gate_pin(GPIOPin *gate_pin) { gate_pin_ = gate_pin; }
|
||||
void set_zero_cross_pin(GPIOPin *zero_cross_pin) { zero_cross_pin_ = zero_cross_pin; }
|
||||
void set_init_with_half_cycle(bool init_with_half_cycle) { init_with_half_cycle_ = init_with_half_cycle; }
|
||||
void set_method(DimMethod method) { method_ = method; }
|
||||
|
||||
protected:
|
||||
void write_state(float state) override;
|
||||
|
||||
GPIOPin *gate_pin_;
|
||||
GPIOPin *zero_cross_pin_;
|
||||
AcDimmerDataStore store_;
|
||||
bool init_with_half_cycle_;
|
||||
DimMethod method_;
|
||||
};
|
||||
|
||||
} // namespace ac_dimmer
|
||||
} // namespace esphome
|
45
esphome/components/ac_dimmer/output.py
Normal file
@@ -0,0 +1,45 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome import pins
|
||||
from esphome.components import output
|
||||
from esphome.const import CONF_ID, CONF_MIN_POWER, CONF_METHOD
|
||||
|
||||
CODEOWNERS = ['@glmnet']
|
||||
|
||||
ac_dimmer_ns = cg.esphome_ns.namespace('ac_dimmer')
|
||||
AcDimmer = ac_dimmer_ns.class_('AcDimmer', output.FloatOutput, cg.Component)
|
||||
|
||||
DimMethod = ac_dimmer_ns.enum('DimMethod')
|
||||
DIM_METHODS = {
|
||||
'LEADING_PULSE': DimMethod.DIM_METHOD_LEADING_PULSE,
|
||||
'LEADING': DimMethod.DIM_METHOD_LEADING,
|
||||
'TRAILING': DimMethod.DIM_METHOD_TRAILING,
|
||||
}
|
||||
|
||||
CONF_GATE_PIN = 'gate_pin'
|
||||
CONF_ZERO_CROSS_PIN = 'zero_cross_pin'
|
||||
CONF_INIT_WITH_HALF_CYCLE = 'init_with_half_cycle'
|
||||
CONFIG_SCHEMA = output.FLOAT_OUTPUT_SCHEMA.extend({
|
||||
cv.Required(CONF_ID): cv.declare_id(AcDimmer),
|
||||
cv.Required(CONF_GATE_PIN): pins.internal_gpio_output_pin_schema,
|
||||
cv.Required(CONF_ZERO_CROSS_PIN): pins.internal_gpio_input_pin_schema,
|
||||
cv.Optional(CONF_INIT_WITH_HALF_CYCLE, default=True): cv.boolean,
|
||||
cv.Optional(CONF_METHOD, default='leading pulse'): cv.enum(DIM_METHODS, upper=True, space='_'),
|
||||
}).extend(cv.COMPONENT_SCHEMA)
|
||||
|
||||
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
yield cg.register_component(var, config)
|
||||
|
||||
# override default min power to 10%
|
||||
if CONF_MIN_POWER not in config:
|
||||
config[CONF_MIN_POWER] = 0.1
|
||||
yield output.register_output(var, config)
|
||||
|
||||
pin = yield cg.gpio_pin_expression(config[CONF_GATE_PIN])
|
||||
cg.add(var.set_gate_pin(pin))
|
||||
pin = yield cg.gpio_pin_expression(config[CONF_ZERO_CROSS_PIN])
|
||||
cg.add(var.set_zero_cross_pin(pin))
|
||||
cg.add(var.set_init_with_half_cycle(config[CONF_INIT_WITH_HALF_CYCLE]))
|
||||
cg.add(var.set_method(config[CONF_METHOD]))
|
24
esphome/components/adalight/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import uart
|
||||
from esphome.components.light.types import AddressableLightEffect
|
||||
from esphome.components.light.effects import register_addressable_effect
|
||||
from esphome.const import CONF_NAME, CONF_UART_ID
|
||||
|
||||
DEPENDENCIES = ['uart']
|
||||
|
||||
adalight_ns = cg.esphome_ns.namespace('adalight')
|
||||
AdalightLightEffect = adalight_ns.class_(
|
||||
'AdalightLightEffect', uart.UARTDevice, AddressableLightEffect)
|
||||
|
||||
CONFIG_SCHEMA = cv.Schema({})
|
||||
|
||||
|
||||
@register_addressable_effect('adalight', AdalightLightEffect, "Adalight", {
|
||||
cv.GenerateID(CONF_UART_ID): cv.use_id(uart.UARTComponent)
|
||||
})
|
||||
def adalight_light_effect_to_code(config, effect_id):
|
||||
effect = cg.new_Pvariable(effect_id, config[CONF_NAME])
|
||||
yield uart.register_uart_device(effect, config)
|
||||
|
||||
yield effect
|
140
esphome/components/adalight/adalight_light_effect.cpp
Normal file
@@ -0,0 +1,140 @@
|
||||
#include "adalight_light_effect.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace adalight {
|
||||
|
||||
static const char *TAG = "adalight_light_effect";
|
||||
|
||||
static const uint32_t ADALIGHT_ACK_INTERVAL = 1000;
|
||||
static const uint32_t ADALIGHT_RECEIVE_TIMEOUT = 1000;
|
||||
|
||||
AdalightLightEffect::AdalightLightEffect(const std::string &name) : AddressableLightEffect(name) {}
|
||||
|
||||
void AdalightLightEffect::start() {
|
||||
AddressableLightEffect::start();
|
||||
|
||||
last_ack_ = 0;
|
||||
last_byte_ = 0;
|
||||
last_reset_ = 0;
|
||||
}
|
||||
|
||||
void AdalightLightEffect::stop() {
|
||||
frame_.resize(0);
|
||||
|
||||
AddressableLightEffect::stop();
|
||||
}
|
||||
|
||||
int AdalightLightEffect::get_frame_size_(int led_count) const {
|
||||
// 3 bytes: Ada
|
||||
// 2 bytes: LED count
|
||||
// 1 byte: checksum
|
||||
// 3 bytes per LED
|
||||
return 3 + 2 + 1 + led_count * 3;
|
||||
}
|
||||
|
||||
void AdalightLightEffect::reset_frame_(light::AddressableLight &it) {
|
||||
int buffer_capacity = get_frame_size_(it.size());
|
||||
|
||||
frame_.clear();
|
||||
frame_.reserve(buffer_capacity);
|
||||
}
|
||||
|
||||
void AdalightLightEffect::blank_all_leds_(light::AddressableLight &it) {
|
||||
for (int led = it.size(); led-- > 0;) {
|
||||
it[led].set(light::ESPColor::BLACK);
|
||||
}
|
||||
}
|
||||
|
||||
void AdalightLightEffect::apply(light::AddressableLight &it, const light::ESPColor ¤t_color) {
|
||||
const uint32_t now = millis();
|
||||
|
||||
if (now - this->last_ack_ >= ADALIGHT_ACK_INTERVAL) {
|
||||
ESP_LOGV(TAG, "Sending ACK");
|
||||
this->write_str("Ada\n");
|
||||
this->last_ack_ = now;
|
||||
}
|
||||
|
||||
if (!this->last_reset_) {
|
||||
ESP_LOGW(TAG, "Frame: Reset.");
|
||||
reset_frame_(it);
|
||||
blank_all_leds_(it);
|
||||
this->last_reset_ = now;
|
||||
}
|
||||
|
||||
if (!this->frame_.empty() && now - this->last_byte_ >= ADALIGHT_RECEIVE_TIMEOUT) {
|
||||
ESP_LOGW(TAG, "Frame: Receive timeout (size=%zu).", this->frame_.size());
|
||||
reset_frame_(it);
|
||||
blank_all_leds_(it);
|
||||
}
|
||||
|
||||
if (this->available() > 0) {
|
||||
ESP_LOGV(TAG, "Frame: Available (size=%d).", this->available());
|
||||
}
|
||||
|
||||
while (this->available() != 0) {
|
||||
uint8_t data;
|
||||
if (!this->read_byte(&data))
|
||||
break;
|
||||
this->frame_.push_back(data);
|
||||
this->last_byte_ = now;
|
||||
|
||||
switch (this->parse_frame_(it)) {
|
||||
case INVALID:
|
||||
ESP_LOGD(TAG, "Frame: Invalid (size=%zu, first=%d).", this->frame_.size(), this->frame_[0]);
|
||||
reset_frame_(it);
|
||||
break;
|
||||
|
||||
case PARTIAL:
|
||||
break;
|
||||
|
||||
case CONSUMED:
|
||||
ESP_LOGV(TAG, "Frame: Consumed (size=%zu).", this->frame_.size());
|
||||
reset_frame_(it);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AdalightLightEffect::Frame AdalightLightEffect::parse_frame_(light::AddressableLight &it) {
|
||||
if (frame_.empty())
|
||||
return INVALID;
|
||||
|
||||
// Check header: `Ada`
|
||||
if (frame_[0] != 'A')
|
||||
return INVALID;
|
||||
if (frame_.size() > 1 && frame_[1] != 'd')
|
||||
return INVALID;
|
||||
if (frame_.size() > 2 && frame_[2] != 'a')
|
||||
return INVALID;
|
||||
|
||||
// 3 bytes: Count Hi, Count Lo, Checksum
|
||||
if (frame_.size() < 6)
|
||||
return PARTIAL;
|
||||
|
||||
// Check checksum
|
||||
uint16_t checksum = frame_[3] ^ frame_[4] ^ 0x55;
|
||||
if (checksum != frame_[5])
|
||||
return INVALID;
|
||||
|
||||
// Check if we received the full frame
|
||||
uint16_t led_count = (frame_[3] << 8) + frame_[4] + 1;
|
||||
auto buffer_size = get_frame_size_(led_count);
|
||||
if (frame_.size() < buffer_size)
|
||||
return PARTIAL;
|
||||
|
||||
// Apply lights
|
||||
auto accepted_led_count = std::min<int>(led_count, it.size());
|
||||
uint8_t *led_data = &frame_[6];
|
||||
|
||||
for (int led = 0; led < accepted_led_count; led++, led_data += 3) {
|
||||
auto white = std::min(std::min(led_data[0], led_data[1]), led_data[2]);
|
||||
|
||||
it[led].set(light::ESPColor(led_data[0], led_data[1], led_data[2], white));
|
||||
}
|
||||
|
||||
return CONSUMED;
|
||||
}
|
||||
|
||||
} // namespace adalight
|
||||
} // namespace esphome
|
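A minimal sender-side sketch of the byte layout that parse_frame_() accepts may help when testing the effect; the helper below is illustrative and assumes plain RGB data.

def build_adalight_frame(colors):
    # 'Ada' header, LED count encoded as count-1 (hi, lo), checksum hi^lo^0x55,
    # then 3 bytes (R, G, B) per LED - exactly what parse_frame_() checks.
    n = len(colors) - 1
    hi, lo = (n >> 8) & 0xFF, n & 0xFF
    frame = bytearray(b"Ada") + bytes([hi, lo, hi ^ lo ^ 0x55])
    for r, g, b in colors:
        frame += bytes([r, g, b])
    return bytes(frame)

print(build_adalight_frame([(255, 0, 0), (0, 255, 0)]).hex())  # '416461000154ff000000ff00'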
41
esphome/components/adalight/adalight_light_effect.h
Normal file
@@ -0,0 +1,41 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/light/addressable_light_effect.h"
|
||||
#include "esphome/components/uart/uart.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace adalight {
|
||||
|
||||
class AdalightLightEffect : public light::AddressableLightEffect, public uart::UARTDevice {
|
||||
public:
|
||||
AdalightLightEffect(const std::string &name);
|
||||
|
||||
public:
|
||||
void start() override;
|
||||
void stop() override;
|
||||
void apply(light::AddressableLight &it, const light::ESPColor ¤t_color) override;
|
||||
|
||||
protected:
|
||||
enum Frame {
|
||||
INVALID,
|
||||
PARTIAL,
|
||||
CONSUMED,
|
||||
};
|
||||
|
||||
int get_frame_size_(int led_count) const;
|
||||
void reset_frame_(light::AddressableLight &it);
|
||||
void blank_all_leds_(light::AddressableLight &it);
|
||||
Frame parse_frame_(light::AddressableLight &it);
|
||||
|
||||
protected:
|
||||
uint32_t last_ack_{0};
|
||||
uint32_t last_byte_{0};
|
||||
uint32_t last_reset_{0};
|
||||
std::vector<uint8_t> frame_;
|
||||
};
|
||||
|
||||
} // namespace adalight
|
||||
} // namespace esphome
|
1
esphome/components/adc/__init__.py
Normal file
@@ -0,0 +1 @@
CODEOWNERS = ['@esphome/core']
92
esphome/components/adc/adc_sensor.cpp
Normal file
@@ -0,0 +1,92 @@
|
||||
#include "adc_sensor.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
#ifdef USE_ADC_SENSOR_VCC
|
||||
ADC_MODE(ADC_VCC)
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace adc {
|
||||
|
||||
static const char *TAG = "adc";
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
void ADCSensor::set_attenuation(adc_attenuation_t attenuation) { this->attenuation_ = attenuation; }
|
||||
#endif
|
||||
|
||||
void ADCSensor::setup() {
|
||||
ESP_LOGCONFIG(TAG, "Setting up ADC '%s'...", this->get_name().c_str());
|
||||
GPIOPin(this->pin_, INPUT).setup();
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
analogSetPinAttenuation(this->pin_, this->attenuation_);
|
||||
#endif
|
||||
}
|
||||
void ADCSensor::dump_config() {
|
||||
LOG_SENSOR("", "ADC Sensor", this);
|
||||
#ifdef ARDUINO_ARCH_ESP8266
|
||||
#ifdef USE_ADC_SENSOR_VCC
|
||||
ESP_LOGCONFIG(TAG, " Pin: VCC");
|
||||
#else
|
||||
ESP_LOGCONFIG(TAG, " Pin: %u", this->pin_);
|
||||
#endif
|
||||
#endif
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
ESP_LOGCONFIG(TAG, " Pin: %u", this->pin_);
|
||||
switch (this->attenuation_) {
|
||||
case ADC_0db:
|
||||
ESP_LOGCONFIG(TAG, " Attenuation: 0db (max 1.1V)");
|
||||
break;
|
||||
case ADC_2_5db:
|
||||
ESP_LOGCONFIG(TAG, " Attenuation: 2.5db (max 1.5V)");
|
||||
break;
|
||||
case ADC_6db:
|
||||
ESP_LOGCONFIG(TAG, " Attenuation: 6db (max 2.2V)");
|
||||
break;
|
||||
case ADC_11db:
|
||||
ESP_LOGCONFIG(TAG, " Attenuation: 11db (max 3.9V)");
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
LOG_UPDATE_INTERVAL(this);
|
||||
}
|
||||
float ADCSensor::get_setup_priority() const { return setup_priority::DATA; }
|
||||
void ADCSensor::update() {
|
||||
float value_v = this->sample();
|
||||
ESP_LOGD(TAG, "'%s': Got voltage=%.2fV", this->get_name().c_str(), value_v);
|
||||
this->publish_state(value_v);
|
||||
}
|
||||
float ADCSensor::sample() {
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
float value_v = analogRead(this->pin_) / 4095.0f; // NOLINT
|
||||
switch (this->attenuation_) {
|
||||
case ADC_0db:
|
||||
value_v *= 1.1;
|
||||
break;
|
||||
case ADC_2_5db:
|
||||
value_v *= 1.5;
|
||||
break;
|
||||
case ADC_6db:
|
||||
value_v *= 2.2;
|
||||
break;
|
||||
case ADC_11db:
|
||||
value_v *= 3.9;
|
||||
break;
|
||||
}
|
||||
return value_v;
|
||||
#endif
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP8266
|
||||
#ifdef USE_ADC_SENSOR_VCC
|
||||
return ESP.getVcc() / 1024.0f;
|
||||
#else
|
||||
return analogRead(this->pin_) / 1024.0f; // NOLINT
|
||||
#endif
|
||||
#endif
|
||||
}
|
||||
#ifdef ARDUINO_ARCH_ESP8266
|
||||
std::string ADCSensor::unique_id() { return get_mac_address() + "-adc"; }
|
||||
#endif
|
||||
|
||||
} // namespace adc
|
||||
} // namespace esphome
|
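The ESP32 branch of ADCSensor::sample() scales the 12-bit reading by a nominal full-scale voltage per attenuation; a quick sketch of that mapping (names below are illustrative):

ATTENUATION_FULL_SCALE_V = {'0db': 1.1, '2.5db': 1.5, '6db': 2.2, '11db': 3.9}

def esp32_adc_to_volts(raw, attenuation='0db'):
    # raw / 4095 gives the fraction of full scale, exactly as in sample()
    return raw / 4095.0 * ATTENUATION_FULL_SCALE_V[attenuation]

print(esp32_adc_to_volts(2048, '11db'))  # ~1.95 V at mid scale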
42
esphome/components/adc/adc_sensor.h
Normal file
@@ -0,0 +1,42 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/esphal.h"
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/components/voltage_sampler/voltage_sampler.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace adc {
|
||||
|
||||
class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage_sampler::VoltageSampler {
|
||||
public:
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
/// Set the attenuation for this pin. Only available on the ESP32.
|
||||
void set_attenuation(adc_attenuation_t attenuation);
|
||||
#endif
|
||||
|
||||
/// Update adc values.
|
||||
void update() override;
|
||||
/// Set up the ADC.
|
||||
void setup() override;
|
||||
void dump_config() override;
|
||||
/// `DATA` setup priority.
|
||||
float get_setup_priority() const override;
|
||||
void set_pin(uint8_t pin) { this->pin_ = pin; }
|
||||
float sample() override;
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP8266
|
||||
std::string unique_id() override;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
uint8_t pin_;
|
||||
|
||||
#ifdef ARDUINO_ARCH_ESP32
|
||||
adc_attenuation_t attenuation_{ADC_0db};
|
||||
#endif
|
||||
};
|
||||
|
||||
} // namespace adc
|
||||
} // namespace esphome
|
48
esphome/components/adc/sensor.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome import pins
|
||||
from esphome.components import sensor, voltage_sampler
|
||||
from esphome.const import CONF_ATTENUATION, CONF_ID, CONF_PIN, ICON_FLASH, UNIT_VOLT
|
||||
|
||||
|
||||
AUTO_LOAD = ['voltage_sampler']
|
||||
|
||||
ATTENUATION_MODES = {
|
||||
'0db': cg.global_ns.ADC_0db,
|
||||
'2.5db': cg.global_ns.ADC_2_5db,
|
||||
'6db': cg.global_ns.ADC_6db,
|
||||
'11db': cg.global_ns.ADC_11db,
|
||||
}
|
||||
|
||||
|
||||
def validate_adc_pin(value):
|
||||
vcc = str(value).upper()
|
||||
if vcc == 'VCC':
|
||||
return cv.only_on_esp8266(vcc)
|
||||
return pins.analog_pin(value)
|
||||
|
||||
|
||||
adc_ns = cg.esphome_ns.namespace('adc')
|
||||
ADCSensor = adc_ns.class_('ADCSensor', sensor.Sensor, cg.PollingComponent,
|
||||
voltage_sampler.VoltageSampler)
|
||||
|
||||
CONFIG_SCHEMA = sensor.sensor_schema(UNIT_VOLT, ICON_FLASH, 2).extend({
|
||||
cv.GenerateID(): cv.declare_id(ADCSensor),
|
||||
cv.Required(CONF_PIN): validate_adc_pin,
|
||||
cv.SplitDefault(CONF_ATTENUATION, esp32='0db'):
|
||||
cv.All(cv.only_on_esp32, cv.enum(ATTENUATION_MODES, lower=True)),
|
||||
}).extend(cv.polling_component_schema('60s'))
|
||||
|
||||
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
yield cg.register_component(var, config)
|
||||
yield sensor.register_sensor(var, config)
|
||||
|
||||
if config[CONF_PIN] == 'VCC':
|
||||
cg.add_define('USE_ADC_SENSOR_VCC')
|
||||
else:
|
||||
cg.add(var.set_pin(config[CONF_PIN]))
|
||||
|
||||
if CONF_ATTENUATION in config:
|
||||
cg.add(var.set_attenuation(config[CONF_ATTENUATION]))
|
0
esphome/components/ade7953/__init__.py
Normal file
54
esphome/components/ade7953/ade7953.cpp
Normal file
@@ -0,0 +1,54 @@
|
||||
#include "ade7953.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace ade7953 {
|
||||
|
||||
static const char *TAG = "ade7953";
|
||||
|
||||
void ADE7953::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "ADE7953:");
|
||||
if (this->has_irq_) {
|
||||
ESP_LOGCONFIG(TAG, " IRQ Pin: GPIO%u", this->irq_pin_number_);
|
||||
}
|
||||
LOG_I2C_DEVICE(this);
|
||||
LOG_UPDATE_INTERVAL(this);
|
||||
LOG_SENSOR(" ", "Voltage Sensor", this->voltage_sensor_);
|
||||
LOG_SENSOR(" ", "Current A Sensor", this->current_a_sensor_);
|
||||
LOG_SENSOR(" ", "Current B Sensor", this->current_b_sensor_);
|
||||
LOG_SENSOR(" ", "Active Power A Sensor", this->active_power_a_sensor_);
|
||||
LOG_SENSOR(" ", "Active Power B Sensor", this->active_power_b_sensor_);
|
||||
}
|
||||
|
||||
#define ADE_PUBLISH_(name, factor) \
|
||||
if (name && this->name##_sensor_) { \
|
||||
float value = *name / factor; \
|
||||
this->name##_sensor_->publish_state(value); \
|
||||
}
|
||||
#define ADE_PUBLISH(name, factor) ADE_PUBLISH_(name, factor)
|
||||
|
||||
void ADE7953::update() {
|
||||
if (!this->is_setup_)
|
||||
return;
|
||||
|
||||
auto active_power_a = this->ade_read_<int32_t>(0x0312);
|
||||
ADE_PUBLISH(active_power_a, 154.0f);
|
||||
auto active_power_b = this->ade_read_<int32_t>(0x0313);
|
||||
ADE_PUBLISH(active_power_b, 154.0f);
|
||||
auto current_a = this->ade_read_<uint32_t>(0x031A);
|
||||
ADE_PUBLISH(current_a, 100000.0f);
|
||||
auto current_b = this->ade_read_<uint32_t>(0x031B);
|
||||
ADE_PUBLISH(current_b, 100000.0f);
|
||||
auto voltage = this->ade_read_<uint32_t>(0x031C);
|
||||
ADE_PUBLISH(voltage, 26000.0f);
|
||||
|
||||
// auto apparent_power_a = this->ade_read_<int32_t>(0x0310);
|
||||
// auto apparent_power_b = this->ade_read_<int32_t>(0x0311);
|
||||
// auto reactive_power_a = this->ade_read_<int32_t>(0x0314);
|
||||
// auto reactive_power_b = this->ade_read_<int32_t>(0x0315);
|
||||
// auto power_factor_a = this->ade_read_<int16_t>(0x010A);
|
||||
// auto power_factor_b = this->ade_read_<int16_t>(0x010B);
|
||||
}
|
||||
|
||||
} // namespace ade7953
|
||||
} // namespace esphome
|
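update() publishes each raw register divided by a fixed scale factor; the mapping can be summarised with a small sketch (the dictionary and helper below are illustrative, the registers and factors are the ones used above):

ADE7953_REGISTERS = {
    'active_power_a': (0x0312, 154.0),
    'active_power_b': (0x0313, 154.0),
    'current_a': (0x031A, 100000.0),
    'current_b': (0x031B, 100000.0),
    'voltage': (0x031C, 26000.0),
}

def ade7953_to_physical(name, raw):
    # Same scaling as the ADE_PUBLISH() calls in ADE7953::update()
    _, factor = ADE7953_REGISTERS[name]
    return raw / factor

print(ade7953_to_physical('voltage', 5980000))  # ~230.0 V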
79
esphome/components/ade7953/ade7953.h
Normal file
@@ -0,0 +1,79 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace ade7953 {
|
||||
|
||||
class ADE7953 : public i2c::I2CDevice, public PollingComponent {
|
||||
public:
|
||||
void set_irq_pin(uint8_t irq_pin) {
|
||||
has_irq_ = true;
|
||||
irq_pin_number_ = irq_pin;
|
||||
}
|
||||
void set_voltage_sensor(sensor::Sensor *voltage_sensor) { voltage_sensor_ = voltage_sensor; }
|
||||
void set_current_a_sensor(sensor::Sensor *current_a_sensor) { current_a_sensor_ = current_a_sensor; }
|
||||
void set_current_b_sensor(sensor::Sensor *current_b_sensor) { current_b_sensor_ = current_b_sensor; }
|
||||
void set_active_power_a_sensor(sensor::Sensor *active_power_a_sensor) {
|
||||
active_power_a_sensor_ = active_power_a_sensor;
|
||||
}
|
||||
void set_active_power_b_sensor(sensor::Sensor *active_power_b_sensor) {
|
||||
active_power_b_sensor_ = active_power_b_sensor;
|
||||
}
|
||||
|
||||
void setup() override {
|
||||
if (this->has_irq_) {
|
||||
// Allocate on the heap: storing the address of a stack-local GPIOPin
// would leave irq_pin_ dangling once setup() returns.
this->irq_pin_ = new GPIOPin(this->irq_pin_number_, INPUT);
this->irq_pin_->setup();
|
||||
}
|
||||
this->set_timeout(100, [this]() {
|
||||
this->ade_write_<uint8_t>(0x0010, 0x04);
|
||||
this->ade_write_<uint8_t>(0x00FE, 0xAD);
|
||||
this->ade_write_<uint16_t>(0x0120, 0x0030);
|
||||
this->is_setup_ = true;
|
||||
});
|
||||
}
|
||||
|
||||
void dump_config() override;
|
||||
|
||||
void update() override;
|
||||
|
||||
protected:
|
||||
template<typename T> bool ade_write_(uint16_t reg, T value) {
|
||||
std::vector<uint8_t> data;
|
||||
data.push_back(reg >> 8);
|
||||
data.push_back(reg >> 0);
|
||||
for (int i = sizeof(T) - 1; i >= 0; i--)
|
||||
data.push_back(value >> (i * 8));
|
||||
return this->write_bytes_raw(data);
|
||||
}
|
||||
template<typename T> optional<T> ade_read_(uint16_t reg) {
|
||||
uint8_t hi = reg >> 8;
|
||||
uint8_t lo = reg >> 0;
|
||||
if (!this->write_bytes_raw({hi, lo}))
|
||||
return {};
|
||||
auto ret = this->read_bytes_raw<sizeof(T)>();
|
||||
if (!ret.has_value())
|
||||
return {};
|
||||
T result = 0;
|
||||
for (int i = 0, j = sizeof(T) - 1; i < sizeof(T); i++, j--)
|
||||
result |= T((*ret)[i]) << (j * 8);
|
||||
return result;
|
||||
}
|
||||
|
||||
bool has_irq_ = false;
|
||||
uint8_t irq_pin_number_;
|
||||
GPIOPin *irq_pin_{nullptr};
|
||||
bool is_setup_{false};
|
||||
sensor::Sensor *voltage_sensor_{nullptr};
|
||||
sensor::Sensor *current_a_sensor_{nullptr};
|
||||
sensor::Sensor *current_b_sensor_{nullptr};
|
||||
sensor::Sensor *active_power_a_sensor_{nullptr};
|
||||
sensor::Sensor *active_power_b_sensor_{nullptr};
|
||||
};
|
||||
|
||||
} // namespace ade7953
|
||||
} // namespace esphome
|
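ade_write_() and ade_read_() frame every transaction as a 16-bit register address followed by the value, both MSB first; a short sketch of the write framing (helper name illustrative):

def ade7953_write_frame(reg, value, size):
    # 16-bit register address (MSB first), then the value in `size` bytes, MSB first,
    # matching the loop in ADE7953::ade_write_<T>()
    data = [(reg >> 8) & 0xFF, reg & 0xFF]
    data += [(value >> (8 * i)) & 0xFF for i in range(size - 1, -1, -1)]
    return bytes(data)

# The unlock sequence from setup(): write 0xAD to register 0x00FE
print(ade7953_write_frame(0x00FE, 0xAD, 1).hex())  # '00fead'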
44
esphome/components/ade7953/sensor.py
Normal file
@@ -0,0 +1,44 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import sensor, i2c
|
||||
from esphome import pins
|
||||
from esphome.const import CONF_ID, CONF_VOLTAGE, \
|
||||
UNIT_VOLT, ICON_FLASH, UNIT_AMPERE, UNIT_WATT
|
||||
|
||||
DEPENDENCIES = ['i2c']
|
||||
|
||||
ade7953_ns = cg.esphome_ns.namespace('ade7953')
|
||||
ADE7953 = ade7953_ns.class_('ADE7953', cg.PollingComponent, i2c.I2CDevice)
|
||||
|
||||
CONF_IRQ_PIN = 'irq_pin'
|
||||
CONF_CURRENT_A = 'current_a'
|
||||
CONF_CURRENT_B = 'current_b'
|
||||
CONF_ACTIVE_POWER_A = 'active_power_a'
|
||||
CONF_ACTIVE_POWER_B = 'active_power_b'
|
||||
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_id(ADE7953),
|
||||
cv.Optional(CONF_IRQ_PIN): pins.input_pin,
|
||||
cv.Optional(CONF_VOLTAGE): sensor.sensor_schema(UNIT_VOLT, ICON_FLASH, 1),
|
||||
cv.Optional(CONF_CURRENT_A): sensor.sensor_schema(UNIT_AMPERE, ICON_FLASH, 2),
|
||||
cv.Optional(CONF_CURRENT_B): sensor.sensor_schema(UNIT_AMPERE, ICON_FLASH, 2),
|
||||
cv.Optional(CONF_ACTIVE_POWER_A): sensor.sensor_schema(UNIT_WATT, ICON_FLASH, 1),
|
||||
cv.Optional(CONF_ACTIVE_POWER_B): sensor.sensor_schema(UNIT_WATT, ICON_FLASH, 1),
|
||||
}).extend(cv.polling_component_schema('60s')).extend(i2c.i2c_device_schema(0x38))
|
||||
|
||||
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
yield cg.register_component(var, config)
|
||||
yield i2c.register_i2c_device(var, config)
|
||||
|
||||
if CONF_IRQ_PIN in config:
|
||||
cg.add(var.set_irq_pin(config[CONF_IRQ_PIN]))
|
||||
|
||||
for key in [CONF_VOLTAGE, CONF_CURRENT_A, CONF_CURRENT_B, CONF_ACTIVE_POWER_A,
|
||||
CONF_ACTIVE_POWER_B]:
|
||||
if key not in config:
|
||||
continue
|
||||
conf = config[key]
|
||||
sens = yield sensor.new_sensor(conf)
|
||||
cg.add(getattr(var, f'set_{key}_sensor')(sens))
|
25
esphome/components/ads1115/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import i2c
|
||||
from esphome.const import CONF_ID
|
||||
|
||||
DEPENDENCIES = ['i2c']
|
||||
AUTO_LOAD = ['sensor', 'voltage_sampler']
|
||||
MULTI_CONF = True
|
||||
|
||||
ads1115_ns = cg.esphome_ns.namespace('ads1115')
|
||||
ADS1115Component = ads1115_ns.class_('ADS1115Component', cg.Component, i2c.I2CDevice)
|
||||
|
||||
CONF_CONTINUOUS_MODE = 'continuous_mode'
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_id(ADS1115Component),
|
||||
cv.Optional(CONF_CONTINUOUS_MODE, default=False): cv.boolean,
|
||||
}).extend(cv.COMPONENT_SCHEMA).extend(i2c.i2c_device_schema(None))
|
||||
|
||||
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
yield cg.register_component(var, config)
|
||||
yield i2c.register_i2c_device(var, config)
|
||||
|
||||
cg.add(var.set_continuous_mode(config[CONF_CONTINUOUS_MODE]))
|
169
esphome/components/ads1115/ads1115.cpp
Normal file
@@ -0,0 +1,169 @@
|
||||
#include "ads1115.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace ads1115 {
|
||||
|
||||
static const char *TAG = "ads1115";
|
||||
static const uint8_t ADS1115_REGISTER_CONVERSION = 0x00;
|
||||
static const uint8_t ADS1115_REGISTER_CONFIG = 0x01;
|
||||
|
||||
static const uint8_t ADS1115_DATA_RATE_860_SPS = 0b111;
|
||||
|
||||
void ADS1115Component::setup() {
|
||||
ESP_LOGCONFIG(TAG, "Setting up ADS1115...");
|
||||
uint16_t value;
|
||||
if (!this->read_byte_16(ADS1115_REGISTER_CONVERSION, &value)) {
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
uint16_t config = 0;
|
||||
// Clear single-shot bit
|
||||
// 0b0xxxxxxxxxxxxxxx
|
||||
config |= 0b0000000000000000;
|
||||
// Setup multiplexer
|
||||
// 0bx000xxxxxxxxxxxx
|
||||
config |= ADS1115_MULTIPLEXER_P0_N1 << 12;
|
||||
|
||||
// Setup Gain
|
||||
// 0bxxxx000xxxxxxxxx
|
||||
config |= ADS1115_GAIN_6P144 << 9;
|
||||
|
||||
if (this->continuous_mode_) {
|
||||
// Set continuous mode
|
||||
// 0bxxxxxxx0xxxxxxxx
|
||||
config |= 0b0000000000000000;
|
||||
} else {
|
||||
// Set singleshot mode
|
||||
// 0bxxxxxxx1xxxxxxxx
|
||||
config |= 0b0000000100000000;
|
||||
}
|
||||
|
||||
// Set data rate - 860 samples per second (we're in singleshot mode)
|
||||
// 0bxxxxxxxx100xxxxx
|
||||
config |= ADS1115_DATA_RATE_860_SPS << 5;
|
||||
|
||||
// Set comparator mode - hysteresis
|
||||
// 0bxxxxxxxxxxx0xxxx
|
||||
config |= 0b0000000000000000;
|
||||
|
||||
// Set comparator polarity - active low
|
||||
// 0bxxxxxxxxxxxx0xxx
|
||||
config |= 0b0000000000000000;
|
||||
|
||||
// Set comparator latch enabled - false
|
||||
// 0bxxxxxxxxxxxxx0xx
|
||||
config |= 0b0000000000000000;
|
||||
|
||||
// Set comparator que mode - disabled
|
||||
// 0bxxxxxxxxxxxxxx11
|
||||
config |= 0b0000000000000011;
|
||||
|
||||
if (!this->write_byte_16(ADS1115_REGISTER_CONFIG, config)) {
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
this->prev_config_ = config;
|
||||
|
||||
for (auto *sensor : this->sensors_) {
|
||||
this->set_interval(sensor->get_name(), sensor->update_interval(),
|
||||
[this, sensor] { this->request_measurement(sensor); });
|
||||
}
|
||||
}
|
||||
void ADS1115Component::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "Setting up ADS1115...");
|
||||
LOG_I2C_DEVICE(this);
|
||||
if (this->is_failed()) {
|
||||
ESP_LOGE(TAG, "Communication with ADS1115 failed!");
|
||||
}
|
||||
|
||||
for (auto *sensor : this->sensors_) {
|
||||
LOG_SENSOR(" ", "Sensor", sensor);
|
||||
ESP_LOGCONFIG(TAG, " Multiplexer: %u", sensor->get_multiplexer());
|
||||
ESP_LOGCONFIG(TAG, " Gain: %u", sensor->get_gain());
|
||||
}
|
||||
}
|
||||
float ADS1115Component::request_measurement(ADS1115Sensor *sensor) {
|
||||
uint16_t config = this->prev_config_;
|
||||
// Multiplexer
|
||||
// 0bxBBBxxxxxxxxxxxx
|
||||
config &= 0b1000111111111111;
|
||||
config |= (sensor->get_multiplexer() & 0b111) << 12;
|
||||
|
||||
// Gain
|
||||
// 0bxxxxBBBxxxxxxxxx
|
||||
config &= 0b1111000111111111;
|
||||
config |= (sensor->get_gain() & 0b111) << 9;
|
||||
|
||||
if (!this->continuous_mode_) {
|
||||
// Start conversion
|
||||
config |= 0b1000000000000000;
|
||||
}
|
||||
|
||||
if (!this->continuous_mode_ || this->prev_config_ != config) {
|
||||
if (!this->write_byte_16(ADS1115_REGISTER_CONFIG, config)) {
|
||||
this->status_set_warning();
|
||||
return NAN;
|
||||
}
|
||||
this->prev_config_ = config;
|
||||
|
||||
// about 1.6 ms with 860 samples per second
|
||||
delay(2);
|
||||
|
||||
uint32_t start = millis();
|
||||
while (this->read_byte_16(ADS1115_REGISTER_CONFIG, &config) && (config >> 15) == 0) {
|
||||
if (millis() - start > 100) {
|
||||
ESP_LOGW(TAG, "Reading ADS1115 timed out");
|
||||
this->status_set_warning();
|
||||
return NAN;
|
||||
}
|
||||
yield();
|
||||
}
|
||||
}
|
||||
|
||||
uint16_t raw_conversion;
|
||||
if (!this->read_byte_16(ADS1115_REGISTER_CONVERSION, &raw_conversion)) {
|
||||
this->status_set_warning();
|
||||
return NAN;
|
||||
}
|
||||
auto signed_conversion = static_cast<int16_t>(raw_conversion);
|
||||
|
||||
float millivolts;
|
||||
switch (sensor->get_gain()) {
|
||||
case ADS1115_GAIN_6P144:
|
||||
millivolts = signed_conversion * 0.187500f;
|
||||
break;
|
||||
case ADS1115_GAIN_4P096:
|
||||
millivolts = signed_conversion * 0.125000f;
|
||||
break;
|
||||
case ADS1115_GAIN_2P048:
|
||||
millivolts = signed_conversion * 0.062500f;
|
||||
break;
|
||||
case ADS1115_GAIN_1P024:
|
||||
millivolts = signed_conversion * 0.031250f;
|
||||
break;
|
||||
case ADS1115_GAIN_0P512:
|
||||
millivolts = signed_conversion * 0.015625f;
|
||||
break;
|
||||
case ADS1115_GAIN_0P256:
|
||||
millivolts = signed_conversion * 0.007813f;
|
||||
break;
|
||||
default:
|
||||
millivolts = NAN;
|
||||
}
|
||||
|
||||
this->status_clear_warning();
|
||||
return millivolts / 1e3f;
|
||||
}
|
||||
|
||||
float ADS1115Sensor::sample() { return this->parent_->request_measurement(this); }
|
||||
void ADS1115Sensor::update() {
|
||||
float v = this->parent_->request_measurement(this);
|
||||
if (!isnan(v)) {
|
||||
ESP_LOGD(TAG, "'%s': Got Voltage=%fV", this->get_name().c_str(), v);
|
||||
this->publish_state(v);
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace ads1115
|
||||
} // namespace esphome
|
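The gain switch in request_measurement() is just a choice of LSB size in millivolts; a worked conversion (names below are illustrative):

ADS1115_LSB_MV = {
    '6.144': 0.187500, '4.096': 0.125000, '2.048': 0.062500,
    '1.024': 0.031250, '0.512': 0.015625, '0.256': 0.007813,
}

def ads1115_raw_to_volts(raw16, gain='6.144'):
    # Interpret the conversion register as signed 16 bit, then scale by the LSB size
    signed = raw16 - 0x10000 if raw16 & 0x8000 else raw16
    return signed * ADS1115_LSB_MV[gain] / 1000.0

print(ads1115_raw_to_volts(0x7FFF, '4.096'))  # ~4.096 V, the positive full scale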
71
esphome/components/ads1115/ads1115.h
Normal file
@@ -0,0 +1,71 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
#include "esphome/components/voltage_sampler/voltage_sampler.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace ads1115 {
|
||||
|
||||
enum ADS1115Multiplexer {
|
||||
ADS1115_MULTIPLEXER_P0_N1 = 0b000,
|
||||
ADS1115_MULTIPLEXER_P0_N3 = 0b001,
|
||||
ADS1115_MULTIPLEXER_P1_N3 = 0b010,
|
||||
ADS1115_MULTIPLEXER_P2_N3 = 0b011,
|
||||
ADS1115_MULTIPLEXER_P0_NG = 0b100,
|
||||
ADS1115_MULTIPLEXER_P1_NG = 0b101,
|
||||
ADS1115_MULTIPLEXER_P2_NG = 0b110,
|
||||
ADS1115_MULTIPLEXER_P3_NG = 0b111,
|
||||
};
|
||||
|
||||
enum ADS1115Gain {
|
||||
ADS1115_GAIN_6P144 = 0b000,
|
||||
ADS1115_GAIN_4P096 = 0b001,
|
||||
ADS1115_GAIN_2P048 = 0b010,
|
||||
ADS1115_GAIN_1P024 = 0b011,
|
||||
ADS1115_GAIN_0P512 = 0b100,
|
||||
ADS1115_GAIN_0P256 = 0b101,
|
||||
};
|
||||
|
||||
class ADS1115Sensor;
|
||||
|
||||
class ADS1115Component : public Component, public i2c::I2CDevice {
|
||||
public:
|
||||
void register_sensor(ADS1115Sensor *obj) { this->sensors_.push_back(obj); }
|
||||
/// Set up the internal sensor array.
|
||||
void setup() override;
|
||||
void dump_config() override;
|
||||
/// HARDWARE_LATE setup priority
|
||||
float get_setup_priority() const override { return setup_priority::DATA; }
|
||||
void set_continuous_mode(bool continuous_mode) { continuous_mode_ = continuous_mode; }
|
||||
|
||||
/// Helper method to request a measurement from a sensor.
|
||||
float request_measurement(ADS1115Sensor *sensor);
|
||||
|
||||
protected:
|
||||
std::vector<ADS1115Sensor *> sensors_;
|
||||
uint16_t prev_config_{0};
|
||||
bool continuous_mode_;
|
||||
};
|
||||
|
||||
/// Internal holder class that is an instance of Sensor so that the hub can create individual sensors.
|
||||
class ADS1115Sensor : public sensor::Sensor, public PollingComponent, public voltage_sampler::VoltageSampler {
|
||||
public:
|
||||
ADS1115Sensor(ADS1115Component *parent) : parent_(parent) {}
|
||||
void update() override;
|
||||
void set_multiplexer(ADS1115Multiplexer multiplexer) { multiplexer_ = multiplexer; }
|
||||
void set_gain(ADS1115Gain gain) { gain_ = gain; }
|
||||
|
||||
float sample() override;
|
||||
uint8_t get_multiplexer() const { return multiplexer_; }
|
||||
uint8_t get_gain() const { return gain_; }
|
||||
|
||||
protected:
|
||||
ADS1115Component *parent_;
|
||||
ADS1115Multiplexer multiplexer_;
|
||||
ADS1115Gain gain_;
|
||||
};
|
||||
|
||||
} // namespace ads1115
|
||||
} // namespace esphome
|
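The multiplexer and gain enums above end up in fixed bit positions of the 16-bit config register; a sketch of how setup() and request_measurement() assemble that word (the helper and its defaults are illustrative):

def ads1115_config_word(mux, gain, single_shot=True, data_rate=0b111):
    # MUX in bits 14-12, PGA in bits 11-9, MODE in bit 8, data rate in bits 7-5,
    # comparator disabled via 0b11 in bits 1-0 - the layout used in ads1115.cpp.
    # request_measurement() additionally ORs in bit 15 to start a single conversion.
    config = (mux & 0b111) << 12
    config |= (gain & 0b111) << 9
    config |= (1 << 8) if single_shot else 0
    config |= (data_rate & 0b111) << 5
    config |= 0b11
    return config

print(hex(ads1115_config_word(mux=0b100, gain=0b000)))  # A0/GND, +-6.144 V -> 0x41e3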
62
esphome/components/ads1115/sensor.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import sensor, voltage_sampler
|
||||
from esphome.const import CONF_GAIN, CONF_MULTIPLEXER, ICON_FLASH, UNIT_VOLT, CONF_ID
|
||||
from . import ads1115_ns, ADS1115Component
|
||||
|
||||
DEPENDENCIES = ['ads1115']
|
||||
|
||||
ADS1115Multiplexer = ads1115_ns.enum('ADS1115Multiplexer')
|
||||
MUX = {
|
||||
'A0_A1': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P0_N1,
|
||||
'A0_A3': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P0_N3,
|
||||
'A1_A3': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P1_N3,
|
||||
'A2_A3': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P2_N3,
|
||||
'A0_GND': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P0_NG,
|
||||
'A1_GND': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P1_NG,
|
||||
'A2_GND': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P2_NG,
|
||||
'A3_GND': ADS1115Multiplexer.ADS1115_MULTIPLEXER_P3_NG,
|
||||
}
|
||||
|
||||
ADS1115Gain = ads1115_ns.enum('ADS1115Gain')
|
||||
GAIN = {
|
||||
'6.144': ADS1115Gain.ADS1115_GAIN_6P144,
|
||||
'4.096': ADS1115Gain.ADS1115_GAIN_4P096,
|
||||
'2.048': ADS1115Gain.ADS1115_GAIN_2P048,
|
||||
'1.024': ADS1115Gain.ADS1115_GAIN_1P024,
|
||||
'0.512': ADS1115Gain.ADS1115_GAIN_0P512,
|
||||
'0.256': ADS1115Gain.ADS1115_GAIN_0P256,
|
||||
}
|
||||
|
||||
|
||||
def validate_gain(value):
|
||||
if isinstance(value, float):
|
||||
value = f'{value:0.03f}'
|
||||
elif not isinstance(value, str):
|
||||
raise cv.Invalid(f'invalid gain "{value}"')
|
||||
|
||||
return cv.enum(GAIN)(value)
|
||||
|
||||
|
||||
ADS1115Sensor = ads1115_ns.class_('ADS1115Sensor', sensor.Sensor, cg.PollingComponent,
|
||||
voltage_sampler.VoltageSampler)
|
||||
|
||||
CONF_ADS1115_ID = 'ads1115_id'
|
||||
CONFIG_SCHEMA = sensor.sensor_schema(UNIT_VOLT, ICON_FLASH, 3).extend({
|
||||
cv.GenerateID(): cv.declare_id(ADS1115Sensor),
|
||||
cv.GenerateID(CONF_ADS1115_ID): cv.use_id(ADS1115Component),
|
||||
cv.Required(CONF_MULTIPLEXER): cv.enum(MUX, upper=True, space='_'),
|
||||
cv.Required(CONF_GAIN): validate_gain,
|
||||
}).extend(cv.polling_component_schema('60s'))
|
||||
|
||||
|
||||
def to_code(config):
|
||||
paren = yield cg.get_variable(config[CONF_ADS1115_ID])
|
||||
var = cg.new_Pvariable(config[CONF_ID], paren)
|
||||
yield sensor.register_sensor(var, config)
|
||||
yield cg.register_component(var, config)
|
||||
|
||||
cg.add(var.set_multiplexer(config[CONF_MULTIPLEXER]))
|
||||
cg.add(var.set_gain(config[CONF_GAIN]))
|
||||
|
||||
cg.add(paren.register_sensor(var))
|
0
esphome/components/aht10/__init__.py
Normal file
127
esphome/components/aht10/aht10.cpp
Normal file
@@ -0,0 +1,127 @@
|
||||
// Implementation based on:
|
||||
// - AHT10: https://github.com/Thinary/AHT10
|
||||
// - Official Datasheet (cn):
|
||||
// http://www.aosong.com/userfiles/files/media/aht10%E8%A7%84%E6%A0%BC%E4%B9%A6v1_1%EF%BC%8820191015%EF%BC%89.pdf
|
||||
// - Unofficial Translated Datasheet (en):
|
||||
// https://wiki.liutyi.info/download/attachments/30507639/Aosong_AHT10_en_draft_0c.pdf
|
||||
//
|
||||
// When configured for humidity, the log 'Components should block for at most 20-30ms in loop().' will be generated in
// verbose mode. This is due to technical specs of the sensor and cannot be avoided.
//
// According to the datasheet, the sensor needs more than 75ms to complete a measurement. In practice it answers almost
// immediately for temperature, but for humidity it takes >90ms to return valid data. From experience, we get the best
// results by making successive requests; the current implementation makes 3 attempts with a delay of 30ms each time.
|
||||
|
||||
#include "aht10.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace aht10 {
|
||||
|
||||
static const char *TAG = "aht10";
|
||||
static const uint8_t AHT10_CALIBRATE_CMD[] = {0xE1};
|
||||
static const uint8_t AHT10_MEASURE_CMD[] = {0xAC, 0x33, 0x00};
|
||||
static const uint8_t AHT10_DEFAULT_DELAY = 5; // ms, for calibration and temperature measurement
|
||||
static const uint8_t AHT10_HUMIDITY_DELAY = 30; // ms
|
||||
static const uint8_t AHT10_ATTEMPS = 3; // safety margin, normally 3 attempts are enough: 3*30=90ms
|
||||
|
||||
void AHT10Component::setup() {
|
||||
ESP_LOGCONFIG(TAG, "Setting up AHT10...");
|
||||
|
||||
if (!this->write_bytes(0, AHT10_CALIBRATE_CMD, sizeof(AHT10_CALIBRATE_CMD))) {
|
||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
uint8_t data;
|
||||
if (!this->read_byte(0, &data, AHT10_DEFAULT_DELAY)) {
|
||||
ESP_LOGD(TAG, "Communication with AHT10 failed!");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
if ((data & 0x68) != 0x08) { // Bit[6:5] = 0b00, NORMAL mode and Bit[3] = 0b1, CALIBRATED
|
||||
ESP_LOGE(TAG, "AHT10 calibration failed!");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
|
||||
ESP_LOGV(TAG, "AHT10 calibrated");
|
||||
}
|
||||
|
||||
void AHT10Component::update() {
|
||||
if (!this->write_bytes(0, AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD))) {
|
||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
||||
this->status_set_warning();
|
||||
return;
|
||||
}
|
||||
uint8_t data[6];
|
||||
uint8_t delay = AHT10_DEFAULT_DELAY;
|
||||
if (this->humidity_sensor_ != nullptr)
|
||||
delay = AHT10_HUMIDITY_DELAY;
|
||||
for (int i = 0; i < AHT10_ATTEMPS; ++i) {
|
||||
ESP_LOGVV(TAG, "Attemps %u at %6ld", i, millis());
|
||||
if (!this->read_bytes(0, data, 6, delay)) {
|
||||
ESP_LOGD(TAG, "Communication with AHT10 failed, waiting...");
|
||||
} else if ((data[0] & 0x80) == 0x80) { // Bit[7] = 0b1, device is busy
|
||||
ESP_LOGD(TAG, "AHT10 is busy, waiting...");
|
||||
} else if (data[1] == 0x0 && data[2] == 0x0 && (data[3] >> 4) == 0x0) {
|
||||
// Unrealistic humidity (0x0)
|
||||
if (this->humidity_sensor_ == nullptr) {
|
||||
ESP_LOGVV(TAG, "ATH10 Unrealistic humidity (0x0), but humidity is not required");
|
||||
break;
|
||||
} else {
|
||||
ESP_LOGD(TAG, "ATH10 Unrealistic humidity (0x0), retrying...");
|
||||
if (!this->write_bytes(0, AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD))) {
|
||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
||||
this->status_set_warning();
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// data is valid, we can break the loop
|
||||
ESP_LOGVV(TAG, "Answer at %6ld", millis());
|
||||
break;
|
||||
}
|
||||
}
|
||||
if ((data[0] & 0x80) == 0x80) {
|
||||
ESP_LOGE(TAG, "Measurements reading timed-out!");
|
||||
this->status_set_warning();
|
||||
return;
|
||||
}
|
||||
|
||||
uint32_t raw_temperature = ((data[3] & 0x0F) << 16) | (data[4] << 8) | data[5];
|
||||
uint32_t raw_humidity = ((data[1] << 16) | (data[2] << 8) | data[3]) >> 4;
|
||||
|
||||
float temperature = ((200.0 * (float) raw_temperature) / 1048576.0) - 50.0;
|
||||
float humidity;
|
||||
if (raw_humidity == 0) { // unrealistic value
|
||||
humidity = NAN;
|
||||
} else {
|
||||
humidity = (float) raw_humidity * 100.0 / 1048576.0;
|
||||
}
|
||||
|
||||
if (this->temperature_sensor_ != nullptr) {
|
||||
this->temperature_sensor_->publish_state(temperature);
|
||||
}
|
||||
if (this->humidity_sensor_ != nullptr) {
|
||||
if (isnan(humidity))
|
||||
ESP_LOGW(TAG, "Invalid humidity! Sensor reported 0%% Hum");
|
||||
this->humidity_sensor_->publish_state(humidity);
|
||||
}
|
||||
this->status_clear_warning();
|
||||
}
|
||||
|
||||
float AHT10Component::get_setup_priority() const { return setup_priority::DATA; }
|
||||
|
||||
void AHT10Component::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "AHT10:");
|
||||
LOG_I2C_DEVICE(this);
|
||||
if (this->is_failed()) {
|
||||
ESP_LOGE(TAG, "Communication with AHT10 failed!");
|
||||
}
|
||||
LOG_SENSOR(" ", "Temperature", this->temperature_sensor_);
|
||||
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
|
||||
}
|
||||
|
||||
} // namespace aht10
|
||||
} // namespace esphome
|
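The 6-byte measurement frame packs a 20-bit humidity and a 20-bit temperature value that share the nibbles of data[3]; a small decoding sketch mirroring update() (the helper name and sample bytes are illustrative):

def aht10_decode(data):
    # Same bit slicing and scaling as AHT10Component::update()
    raw_temperature = ((data[3] & 0x0F) << 16) | (data[4] << 8) | data[5]
    raw_humidity = ((data[1] << 16) | (data[2] << 8) | data[3]) >> 4
    temperature = 200.0 * raw_temperature / 1048576.0 - 50.0
    humidity = None if raw_humidity == 0 else raw_humidity * 100.0 / 1048576.0
    return temperature, humidity

# Status byte plus a mid-scale reading: ~50 %RH and 25 degC
print(aht10_decode([0x1C, 0x80, 0x00, 0x06, 0x00, 0x00]))  # (25.0, 50.0)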
26
esphome/components/aht10/aht10.h
Normal file
@@ -0,0 +1,26 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace aht10 {
|
||||
|
||||
class AHT10Component : public PollingComponent, public i2c::I2CDevice {
|
||||
public:
|
||||
void setup() override;
|
||||
void update() override;
|
||||
void dump_config() override;
|
||||
float get_setup_priority() const override;
|
||||
|
||||
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
|
||||
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { humidity_sensor_ = humidity_sensor; }
|
||||
|
||||
protected:
|
||||
sensor::Sensor *temperature_sensor_;
|
||||
sensor::Sensor *humidity_sensor_;
|
||||
};
|
||||
|
||||
} // namespace aht10
|
||||
} // namespace esphome
|
30
esphome/components/aht10/sensor.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import i2c, sensor
|
||||
from esphome.const import CONF_HUMIDITY, CONF_ID, CONF_TEMPERATURE, \
|
||||
UNIT_CELSIUS, ICON_THERMOMETER, ICON_WATER_PERCENT, UNIT_PERCENT
|
||||
|
||||
DEPENDENCIES = ['i2c']
|
||||
|
||||
aht10_ns = cg.esphome_ns.namespace('aht10')
|
||||
AHT10Component = aht10_ns.class_('AHT10Component', cg.PollingComponent, i2c.I2CDevice)
|
||||
|
||||
CONFIG_SCHEMA = cv.Schema({
|
||||
cv.GenerateID(): cv.declare_id(AHT10Component),
|
||||
cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema(UNIT_CELSIUS, ICON_THERMOMETER, 2),
|
||||
cv.Optional(CONF_HUMIDITY): sensor.sensor_schema(UNIT_PERCENT, ICON_WATER_PERCENT, 2),
|
||||
}).extend(cv.polling_component_schema('60s')).extend(i2c.i2c_device_schema(0x38))
|
||||
|
||||
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
yield cg.register_component(var, config)
|
||||
yield i2c.register_i2c_device(var, config)
|
||||
|
||||
if CONF_TEMPERATURE in config:
|
||||
sens = yield sensor.new_sensor(config[CONF_TEMPERATURE])
|
||||
cg.add(var.set_temperature_sensor(sens))
|
||||
|
||||
if CONF_HUMIDITY in config:
|
||||
sens = yield sensor.new_sensor(config[CONF_HUMIDITY])
|
||||
cg.add(var.set_humidity_sensor(sens))
|
0
esphome/components/am2320/__init__.py
Normal file
107
esphome/components/am2320/am2320.cpp
Normal file
@@ -0,0 +1,107 @@
// Implementation based on:
// - ESPEasy: https://github.com/letscontrolit/ESPEasy/blob/mega/src/_P034_DHT12.ino
// - DHT12_sensor_library: https://github.com/xreef/DHT12_sensor_library/blob/master/DHT12.cpp
// - Arduino - AM2320: https://github.com/EngDial/AM2320/blob/master/src/AM2320.cpp

#include "am2320.h"
#include "esphome/core/log.h"

namespace esphome {
namespace am2320 {

static const char *TAG = "am2320";

// ---=== Calc CRC16 ===---
uint16_t crc_16(uint8_t *ptr, uint8_t length) {
  uint16_t crc = 0xFFFF;
  uint8_t i;
  //------------------------------
  while (length--) {
    crc ^= *ptr++;
    for (i = 0; i < 8; i++)
      if ((crc & 0x01) != 0) {
        crc >>= 1;
        crc ^= 0xA001;
      } else
        crc >>= 1;
  }
  return crc;
}

void AM2320Component::update() {
  uint8_t data[8];
  data[0] = 0;
  data[1] = 4;
  if (!this->read_data_(data)) {
    this->status_set_warning();
    return;
  }

  float temperature = (((data[4] & 0x7F) << 8) + data[5]) / 10.0;
  temperature = (data[4] & 0x80) ? -temperature : temperature;
  float humidity = ((data[2] << 8) + data[3]) / 10.0;

  ESP_LOGD(TAG, "Got temperature=%.1f°C humidity=%.1f%%", temperature, humidity);
  if (this->temperature_sensor_ != nullptr)
    this->temperature_sensor_->publish_state(temperature);
  if (this->humidity_sensor_ != nullptr)
    this->humidity_sensor_->publish_state(humidity);
  this->status_clear_warning();
}
void AM2320Component::setup() {
  ESP_LOGCONFIG(TAG, "Setting up AM2320...");
  uint8_t data[8];
  data[0] = 0;
  data[1] = 4;
  if (!this->read_data_(data)) {
    this->mark_failed();
    return;
  }
}
void AM2320Component::dump_config() {
  ESP_LOGD(TAG, "AM2320:");
  LOG_I2C_DEVICE(this);
  if (this->is_failed()) {
    ESP_LOGE(TAG, "Communication with AM2320 failed!");
  }
  LOG_SENSOR("  ", "Temperature", this->temperature_sensor_);
  LOG_SENSOR("  ", "Humidity", this->humidity_sensor_);
}
float AM2320Component::get_setup_priority() const { return setup_priority::DATA; }

bool AM2320Component::read_bytes_(uint8_t a_register, uint8_t *data, uint8_t len, uint32_t conversion) {
  if (!this->write_bytes(a_register, data, 2)) {
    ESP_LOGW(TAG, "Writing bytes for AM2320 failed!");
    return false;
  }

  if (conversion > 0)
    delay(conversion);
  return this->parent_->raw_receive(this->address_, data, len);
}

bool AM2320Component::read_data_(uint8_t *data) {
  // Wake up
  this->write_bytes(0, data, 0);

  // Write instruction 3, 2 bytes, get 8 bytes back (2 preamble, 2 bytes humidity, 2 bytes temperature, 2 bytes CRC)
  if (!this->read_bytes_(3, data, 8, 2)) {
    ESP_LOGW(TAG, "Updating AM2320 failed!");
    return false;
  }

  uint16_t checksum;

  checksum = data[7] << 8;
  checksum += data[6];

  if (crc_16(data, 6) != checksum) {
    ESP_LOGW(TAG, "AM2320 Checksum invalid!");
    return false;
  }

  return true;
}

}  // namespace am2320
}  // namespace esphome
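Editor's note: the checksum and frame decoding above can be sanity-checked outside the firmware. Below is a minimal Python sketch of the same arithmetic; it is an illustration only (not part of this PR), the example response bytes are invented, and the frame layout (2 preamble bytes, humidity, temperature, CRC transmitted low byte first) follows the read_data_ comment above.

def crc_16(data):
    # Same CRC-16/MODBUS as crc_16() in am2320.cpp: init 0xFFFF, polynomial 0xA001.
    crc = 0xFFFF
    for byte in data:
        crc ^= byte
        for _ in range(8):
            if crc & 0x01:
                crc = (crc >> 1) ^ 0xA001
            else:
                crc >>= 1
    return crc


def decode_frame(frame):
    # frame: the 8 bytes read back from the sensor
    # (2 preamble, 2 bytes humidity, 2 bytes temperature, 2 bytes CRC, low byte first).
    checksum = (frame[7] << 8) | frame[6]
    if crc_16(frame[:6]) != checksum:
        raise ValueError("checksum mismatch")
    humidity = ((frame[2] << 8) + frame[3]) / 10.0
    temperature = (((frame[4] & 0x7F) << 8) + frame[5]) / 10.0
    if frame[4] & 0x80:  # sign-magnitude: high bit of the temperature word marks negative values
        temperature = -temperature
    return temperature, humidity


# Hypothetical example frame: 52.1 % relative humidity, 23.5 °C.
example = bytes([0x03, 0x04, 0x02, 0x09, 0x00, 0xEB])
example += crc_16(example).to_bytes(2, "little")
print(decode_frame(example))  # (23.5, 52.1)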
29  esphome/components/am2320/am2320.h  Normal file
@@ -0,0 +1,29 @@
#pragma once

#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/i2c/i2c.h"

namespace esphome {
namespace am2320 {

class AM2320Component : public PollingComponent, public i2c::I2CDevice {
 public:
  void setup() override;
  void dump_config() override;
  float get_setup_priority() const override;
  void update() override;

  void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
  void set_humidity_sensor(sensor::Sensor *humidity_sensor) { humidity_sensor_ = humidity_sensor; }

 protected:
  bool read_data_(uint8_t *data);
  bool read_bytes_(uint8_t a_register, uint8_t *data, uint8_t len, uint32_t conversion = 0);

  sensor::Sensor *temperature_sensor_;
  sensor::Sensor *humidity_sensor_;
};

}  // namespace am2320
}  // namespace esphome
30  esphome/components/am2320/sensor.py  Normal file
@@ -0,0 +1,30 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
from esphome.const import CONF_HUMIDITY, CONF_ID, CONF_TEMPERATURE, \
    UNIT_CELSIUS, ICON_THERMOMETER, ICON_WATER_PERCENT, UNIT_PERCENT

DEPENDENCIES = ['i2c']

am2320_ns = cg.esphome_ns.namespace('am2320')
AM2320Component = am2320_ns.class_('AM2320Component', cg.PollingComponent, i2c.I2CDevice)

CONFIG_SCHEMA = cv.Schema({
    cv.GenerateID(): cv.declare_id(AM2320Component),
    cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema(UNIT_CELSIUS, ICON_THERMOMETER, 1),
    cv.Optional(CONF_HUMIDITY): sensor.sensor_schema(UNIT_PERCENT, ICON_WATER_PERCENT, 1),
}).extend(cv.polling_component_schema('60s')).extend(i2c.i2c_device_schema(0x5C))


def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    yield cg.register_component(var, config)
    yield i2c.register_i2c_device(var, config)

    if CONF_TEMPERATURE in config:
        sens = yield sensor.new_sensor(config[CONF_TEMPERATURE])
        cg.add(var.set_temperature_sensor(sens))

    if CONF_HUMIDITY in config:
        sens = yield sensor.new_sensor(config[CONF_HUMIDITY])
        cg.add(var.set_humidity_sensor(sens))
94  esphome/components/animation/__init__.py  Normal file
@@ -0,0 +1,94 @@
import logging

from esphome import core
from esphome.components import display, font
import esphome.components.image as espImage
import esphome.config_validation as cv
import esphome.codegen as cg
from esphome.const import CONF_FILE, CONF_ID, CONF_TYPE, CONF_RESIZE
from esphome.core import CORE, HexInt

_LOGGER = logging.getLogger(__name__)

DEPENDENCIES = ['display']
MULTI_CONF = True

Animation_ = display.display_ns.class_('Animation')

CONF_RAW_DATA_ID = 'raw_data_id'

ANIMATION_SCHEMA = cv.Schema({
    cv.Required(CONF_ID): cv.declare_id(Animation_),
    cv.Required(CONF_FILE): cv.file_,
    cv.Optional(CONF_RESIZE): cv.dimensions,
    cv.Optional(CONF_TYPE, default='BINARY'): cv.enum(espImage.IMAGE_TYPE, upper=True),
    cv.GenerateID(CONF_RAW_DATA_ID): cv.declare_id(cg.uint8),
})

CONFIG_SCHEMA = cv.All(font.validate_pillow_installed, ANIMATION_SCHEMA)

CODEOWNERS = ['@syndlex']


def to_code(config):
    from PIL import Image

    path = CORE.relative_config_path(config[CONF_FILE])
    try:
        image = Image.open(path)
    except Exception as e:
        raise core.EsphomeError(f"Could not load image file {path}: {e}")

    width, height = image.size
    frames = image.n_frames
    if CONF_RESIZE in config:
        image.thumbnail(config[CONF_RESIZE])
        width, height = image.size
    else:
        if width > 500 or height > 500:
            _LOGGER.warning("The image you requested is very big. Please consider using"
                            " the resize parameter.")

    if config[CONF_TYPE] == 'GRAYSCALE':
        data = [0 for _ in range(height * width * frames)]
        pos = 0
        for frameIndex in range(frames):
            image.seek(frameIndex)
            frame = image.convert('L', dither=Image.NONE)
            pixels = list(frame.getdata())
            for pix in pixels:
                data[pos] = pix
                pos += 1

    elif config[CONF_TYPE] == 'RGB24':
        data = [0 for _ in range(height * width * 3 * frames)]
        pos = 0
        for frameIndex in range(frames):
            image.seek(frameIndex)
            frame = image.convert('RGB')
            pixels = list(frame.getdata())
            for pix in pixels:
                data[pos] = pix[0]
                pos += 1
                data[pos] = pix[1]
                pos += 1
                data[pos] = pix[2]
                pos += 1

    elif config[CONF_TYPE] == 'BINARY':
        width8 = ((width + 7) // 8) * 8
        data = [0 for _ in range((height * width8 // 8) * frames)]
        for frameIndex in range(frames):
            image.seek(frameIndex)
            frame = image.convert('1', dither=Image.NONE)
            for y in range(height):
                for x in range(width):
                    if frame.getpixel((x, y)):
                        continue
                    pos = x + y * width8 + (height * width8 * frameIndex)
                    data[pos // 8] |= 0x80 >> (pos % 8)

    rhs = [HexInt(x) for x in data]
    prog_arr = cg.progmem_array(config[CONF_RAW_DATA_ID], rhs)
    cg.new_Pvariable(config[CONF_ID], prog_arr, width, height, frames,
                     espImage.IMAGE_TYPE[config[CONF_TYPE]])
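Editor's note: the BINARY branch above pads each row to a multiple of 8 pixels and stores one bit per pixel, most significant bit first; in the component the bit is set for the black pixels of the '1'-mode image. A small standalone sketch of that index arithmetic follows, for illustration only (no Pillow needed; the 3x2 "frame" and the pack_binary name are invented, and here a truthy entry simply means "set this bit").

def pack_binary(frames, width, height):
    # Mirror of the index math in the BINARY branch: rows padded to a
    # multiple of 8 pixels, one bit per pixel, MSB first.
    width8 = ((width + 7) // 8) * 8
    data = [0] * ((height * width8 // 8) * len(frames))
    for frame_index, frame in enumerate(frames):
        for y in range(height):
            for x in range(width):
                if not frame[y][x]:  # skip pixels that should stay 0
                    continue
                pos = x + y * width8 + height * width8 * frame_index
                data[pos // 8] |= 0x80 >> (pos % 8)
    return data


# Hypothetical 3x2 frame with the top-left and bottom-right pixels set.
frame = [[1, 0, 0],
         [0, 0, 1]]
print(pack_binary([frame], width=3, height=2))  # [0x80, 0x20] -> [128, 32]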
23  esphome/components/apds9960/__init__.py  Normal file
@@ -0,0 +1,23 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c
from esphome.const import CONF_ID

DEPENDENCIES = ['i2c']
AUTO_LOAD = ['sensor', 'binary_sensor']
MULTI_CONF = True

CONF_APDS9960_ID = 'apds9960_id'

apds9960_nds = cg.esphome_ns.namespace('apds9960')
APDS9960 = apds9960_nds.class_('APDS9960', cg.PollingComponent, i2c.I2CDevice)

CONFIG_SCHEMA = cv.Schema({
    cv.GenerateID(): cv.declare_id(APDS9960),
}).extend(cv.polling_component_schema('60s')).extend(i2c.i2c_device_schema(0x39))


def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    yield cg.register_component(var, config)
    yield i2c.register_i2c_device(var, config)
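Editor's note: this file sets up APDS9960 as a hub. MULTI_CONF allows several hub instances, AUTO_LOAD pulls in the sensor and binary_sensor domains, and CONF_APDS9960_ID is the key a child platform uses to point back at its hub. The sketch below shows that child-side pattern in rough form; it is not a file from this PR, and the unit, icon, and register_channel hub method are hypothetical placeholders.

# Hypothetical child platform sketch (illustration only, not the actual apds9960 sensor platform).
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import sensor
from esphome.components.apds9960 import APDS9960, CONF_APDS9960_ID

DEPENDENCIES = ['apds9960']

CONFIG_SCHEMA = sensor.sensor_schema('%', 'mdi:brightness-5', 0).extend({
    # Resolve which hub instance this child belongs to.
    cv.GenerateID(CONF_APDS9960_ID): cv.use_id(APDS9960),
})


def to_code(config):
    hub = yield cg.get_variable(config[CONF_APDS9960_ID])
    var = yield sensor.new_sensor(config)
    cg.add(hub.register_channel(var))  # hypothetical hub method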
Some files were not shown because too many files have changed in this diff.