mirror of
				https://github.com/home-assistant/supervisor.git
				synced 2025-10-31 14:39:30 +00:00 
			
		
		
		
	Compare commits
	
		
			1221 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 7841f14163 | ||
|   | cc9f594ab4 | ||
|   | ebfaaeaa6b | ||
|   | ffa91e150d | ||
|   | 06fa9f9a9e | ||
|   | 9f203c42ec | ||
|   | 5d0d34a4af | ||
|   | c2cfc0d3d4 | ||
|   | 0f4810d41f | ||
|   | 175848f2a8 | ||
|   | 472bd66f4d | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 168ea32d2c | ||
|   | e82d6b1ea4 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 6c60ca088c | ||
|   | 83e8f935fd | ||
|   | 71867302a4 | ||
|   | 8bcc402c5f | ||
|   | 72b7d2a123 | ||
|   | 20c1183450 | ||
|   | 0bbfbd2544 | ||
|   | 350bd9c32f | ||
|   | dcca8b0a9a | ||
|   | f77b479e45 | ||
|   | 216565affb | ||
|   | 6f235c2a11 | ||
|   | 27a770bd1d | ||
|   | ef15b67571 | ||
|   | 6aad966c52 | ||
|   | 9811f11859 | ||
|   | 13148ec7fb | ||
|   | b2d7464790 | ||
|   | ce84e185ad | ||
|   | c3f5ee43b6 | ||
|   | e2dc1a4471 | ||
|   | e787e59b49 | ||
|   | f0ed2eba2b | ||
|   | 2364e1e652 | ||
|   | cc56944d75 | ||
|   | 69cea9fc96 | ||
|   | fcebc9d1ed | ||
|   | 9350e4f961 | ||
|   | 387e0ad03e | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 61fec8b290 | ||
|   | 1228baebf4 | ||
|   | a30063e85c | ||
|   | 524cebac4d | ||
|   | c94114a566 | ||
|   | b6ec7a9e64 | ||
|   | 69be7a6d22 | ||
|   | 58155c35f9 | ||
|   | 7b2377291f | ||
|   | 657ee84e39 | ||
|   | 2e4b545265 | ||
|   | 2de1d35dd1 | ||
|   | 2b082b362d | ||
|   | dfdd0d6b4b | ||
|   | a00e81c03f | ||
|   | 776e6bb418 | ||
|   | b31fca656e | ||
|   | fa783a0d2c | ||
|   | 96c0fbaf10 | ||
|   | 24f7801ddc | ||
|   | 8e83e007e9 | ||
|   | d0db466e67 | ||
|   | 3010bd4eb6 | ||
|   | 069bed8815 | ||
|   | d2088ae5f8 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 0ca5a241bb | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | dff32a8e84 | ||
|   | 4a20344652 | ||
|   | 98b969ef06 | ||
|   | c8cb8aecf7 | ||
|   | 73e8875018 | ||
|   | 02aed9c084 | ||
|   | 89148f8fff | ||
|   | 6bde527f5c | ||
|   | d62aabc01b | ||
|   | 82299a3799 | ||
|   | c02f30dd7e | ||
|   | e91983adb4 | ||
|   | ff88359429 | ||
|   | 5a60d5cbe8 | ||
|   | 2b41ffe019 | ||
|   | 1c23e26f93 | ||
|   | 3d555f951d | ||
|   | 6d39b4d7cd | ||
|   | 4fe5d09f01 | ||
|   | e52af3bfb4 | ||
|   | 0467b33cd5 | ||
|   | 14167f6e13 | ||
|   | 7a1aba6f81 | ||
|   | 920f7f2ece | ||
|   | 06fadbd70f | ||
|   | d4f486864f | ||
|   | d3a21303d9 | ||
|   | e1cbfdd84b | ||
|   | 87170a4497 | ||
|   | ae6f8bd345 | ||
|   | b9496e0972 | ||
|   | c36a6dcd65 | ||
|   | 19ca836b78 | ||
|   | 8a6ea7ab50 | ||
|   | 6721b8f265 | ||
|   | 9393521f98 | ||
|   | 398b24e0ab | ||
|   | 374bcf8073 | ||
|   | 7e3859e2f5 | ||
|   | 490ec0d462 | ||
|   | 15bf1ee50e | ||
|   | 6376d92a0d | ||
|   | 10230b0b4c | ||
|   | 2495cda5ec | ||
|   | ae8ddca040 | ||
|   | 0212d027fb | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a3096153ab | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 7434ca9e99 | ||
|   | 4ac7f7dcf0 | ||
|   | e9f5b13aa5 | ||
|   | 1fbb6d46ea | ||
|   | 8dbfea75b1 | ||
|   | 3b3840c087 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a21353909d | ||
|   | 5497ed885a | ||
|   | 39baea759a | ||
|   | 80ddb1d262 | ||
|   | e24987a610 | ||
|   | 9e5c276e3b | ||
|   | c33d31996d | ||
|   | aa1f08fe8a | ||
|   | d78689554a | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 5bee1d851c | ||
|   | ddb8eef4d1 | ||
|   | da513e7347 | ||
|   | 4279d7fd16 | ||
|   | 934eab2e8c | ||
|   | 2a31edc768 | ||
|   | fcdd66dc6e | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a65d3222b9 | ||
|   | 36179596a0 | ||
|   | c083c850c1 | ||
|   | ff903d7b5a | ||
|   | dd603e1ec2 | ||
|   | a2f06b1553 | ||
|   | 8115d2b3d3 | ||
|   | 4f97bb9e0b | ||
|   | 84d24a2c4d | ||
|   | b709061656 | ||
|   | cd9034b3f1 | ||
|   | 25d324c73a | ||
|   | 3a834d1a73 | ||
|   | e9fecb817d | ||
|   | 56e70d7ec4 | ||
|   | 2e73a85aa9 | ||
|   | 1e119e9c03 | ||
|   | 6f6e5c97df | ||
|   | 6ef99974cf | ||
|   | 8984b9aef6 | ||
|   | 63e08b15bc | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 319b2b5d4c | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | bae7bb8ce4 | ||
|   | 0b44df366c | ||
|   | f253c797af | ||
|   | 0a8b1c2797 | ||
|   | 3b45fb417b | ||
|   | 2a2d92e3c5 | ||
|   | a320e42ed5 | ||
|   | fdef712e01 | ||
|   | 5717ac19d7 | ||
|   | 33d7d76fee | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 73bdaa623c | ||
|   | 8ca8f59a0b | ||
|   | 745af3c039 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 5d17e1011a | ||
|   | 826464c41b | ||
|   | a643df8cac | ||
|   | 24ded99286 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 6646eee504 | ||
|   | f55c10914e | ||
|   | b1e768f69e | ||
|   | 4702f8bd5e | ||
|   | 69959b2c97 | ||
|   | 9d6f4f5392 | ||
|   | 36b9a609bf | ||
|   | 36ae0c82b6 | ||
|   | e11011ee51 | ||
|   | 9125211a57 | ||
|   | 3a4ef6ceb3 | ||
|   | ca82993278 | ||
|   | 0925af91e3 | ||
|   | 80bc32243c | ||
|   | f0d232880d | ||
|   | 7c790dbbd9 | ||
|   | 899b17e992 | ||
|   | d1b4521290 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 9bb4feef29 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 4bcdc98a31 | ||
|   | 26f8c1df92 | ||
|   | a481ad73f3 | ||
|   | e4ac17fea6 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | bcd940e95b | ||
|   | 5365aa4466 | ||
|   | a0d106529c | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | bf1a9ec42d | ||
|   | fc5d97562f | ||
|   | f5c171e44f | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a3c3f15806 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | ef58a219ec | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 6708fe36e3 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | e02fa2824c | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a20f927082 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 6d71e3fe81 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 4056fcd75d | ||
|   | 1e723cf0e3 | ||
|   | ce3f670597 | ||
|   | ce3d3d58ec | ||
|   | a92cab48e0 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | ee76317392 | ||
|   | 380ca13be1 | ||
|   | 93f4c5e207 | ||
|   | e438858da0 | ||
|   | 428a4dd849 | ||
|   | 39cc8aaa13 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 39a62864de | ||
|   | 71a162a871 | ||
|   | 05d7eff09a | ||
|   | 7b8ad0782d | ||
|   | df3e9e3a5e | ||
|   | 8cdc769ec8 | ||
|   | 76e1304241 | ||
|   | eb9b1ff03d | ||
|   | b3b12d35fd | ||
|   | 74485262e7 | ||
|   | 615e68b29b | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 927b4695c9 | ||
|   | 11811701d0 | ||
|   | 05c8022db3 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a9ebb147c5 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | ba8ca4d9ee | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 3574df1385 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | b4497d231b | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 5aa9b0245a | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 4c72c3aafc | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | bf4f40f991 | ||
|   | 603334f4f3 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 46548af165 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 8ef32b40c8 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | fb25377087 | ||
|   | a75fd2d07e | ||
|   | e30f39e97e | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 4818ad7465 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 5e4e9740c7 | ||
|   | d4e41dbf80 | ||
|   | cea1a1a15f | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | c2700b14dc | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 07d27170db | ||
|   | 8eb8c07df6 | ||
|   | 7bee6f884c | ||
|   | 78dd20e314 | ||
|   | 2a011b6448 | ||
|   | 5c90370ec8 | ||
|   | 120465b88d | ||
|   | c77292439a | ||
|   | 0a0209f81a | ||
|   | 69a7ed8a5c | ||
|   | 8df35ab488 | ||
|   | a12567d0a8 | ||
|   | 64fe190119 | ||
|   | e3ede66943 | ||
|   | 2672b800d4 | ||
|   | c60d4bda92 | ||
|   | db9d0f2639 | ||
|   | 02d4045ec3 | ||
|   | a308ea6927 | ||
|   | edc5e5e812 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 23b65cb479 | ||
|   | e5eabd2143 | ||
|   | b0dd043975 | ||
|   | 435a1096ed | ||
|   | 21a9084ca0 | ||
|   | 10d9135d86 | ||
|   | 272d8b29f3 | ||
|   | 3d665b9eec | ||
|   | c563f484c9 | ||
|   | 38268ea4ea | ||
|   | c1ad64cddf | ||
|   | b898cd2a3a | ||
|   | 937b31d845 | ||
|   | e4e655493b | ||
|   | 387d2dcc2e | ||
|   | 8abe33d48a | ||
|   | 860442d5c4 | ||
|   | ce5183ce16 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 3e69b04b86 | ||
|   | 8b9cd4f122 | ||
|   | c0e3ccdb83 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | e8cc85c487 | ||
|   | b3eff41692 | ||
|   | 1ea63f185c | ||
|   | a513d5c09a | ||
|   | fb8216c102 | ||
|   | 4f381d01df | ||
|   | de3382226e | ||
|   | 77be830b72 | ||
|   | 09c0e1320f | ||
|   | cc4ee59542 | ||
|   | 1f448744f3 | ||
|   | ee2c257057 | ||
|   | be8439d4ac | ||
|   | 981f2b193c | ||
|   | 39087e09ce | ||
|   | 59960efb9c | ||
|   | 5a53bb5981 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | a67fe69cbb | ||
|   | 9ce2b0765f | ||
|   | 2e53a48504 | ||
|   | 8e4db0c3ec | ||
|   | 4072b06faf | ||
|   | a2cf7ece70 | ||
|   | 734fe3afde | ||
|   | 7f3bc91c1d | ||
|   | 9c2c95757d | ||
|   | b5ed6c586a | ||
|   | 35033d1f76 | ||
|   | 9e41d0c5b0 | ||
|   | 62e92fada9 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | ae0a1a657f | ||
|   | 81e511ba8e | ||
|   | d89cb91c8c | ||
|   | dc31b6e6fe | ||
|   | 930a32de1a | ||
|   | e40f2ed8e3 | ||
|   | abbd3d1078 | ||
|   | 63c9948456 | ||
|   | b6c81d779a | ||
|   | 2480c83169 | ||
|   | 334cc66cf6 | ||
|   | 3cf189ad94 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 6ffb94a0f5 | ||
|   | 3593826441 | ||
|   | 0a0a62f238 | ||
|   | 41ce9913d2 | ||
|   | b77c42384d | ||
|   | 138bb12f98 | ||
|   | 4fe2859f4e | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 0768b2b4bc | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | e6f1772a93 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 5374b2b3b9 | ||
|   | 1196788856 | ||
|   | 9f3f47eb80 | ||
|   | 1a90a478f2 | ||
|   | ee773f3b63 | ||
|   | 5ffc27f60c | ||
|   | 4c13dfb43c | ||
|   | bc099f0d81 | ||
|   | b26dd0af19 | ||
|   | 0dee5bd763 | ||
|   | 0765387ad8 | ||
|   | a07517bd3c | ||
|   | e5f0d80d96 | ||
|   | 2fc5e3b7d9 | ||
|   | 778bc46848 | ||
|   | 882586b246 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | b7c07a2555 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 814b504fa9 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 7ae430e7a8 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 0e7e95ba20 | ||
|   | e577d8acb2 | ||
|   | 0a76ab5054 | ||
|   | 03c5596e04 | ||
|   | 3af4e14e83 | ||
|   | 7c8cf57820 | ||
|   | 8d84a8a62e | ||
|   | 08c45060bd | ||
|   | 7ca8d2811b | ||
|   | bb6898b032 | ||
|   | cd86c6814e | ||
|   | b67e116650 | ||
|   | 57ce411fb6 | ||
|   | 85ed4d9e8d | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | ccb39da569 | ||
|   | dd7ba64d32 | ||
|   | de3edb1654 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | d262151727 | ||
|   | a37c90af96 | ||
|   | 0a3a752b4c | ||
|   | 0a34f427f8 | ||
|   | 157740e374 | ||
|   | b0e994f3f5 | ||
|   | f374852801 | ||
|   | 709f034f2e | ||
|   | 6d6deb8c66 | ||
|   | 5771b417bc | ||
|   | 51efcefdab | ||
|   | d31ab5139d | ||
|   | ce18183daa | ||
|   | b8b73cf880 | ||
|   | 5291e6c1f3 | ||
|   | 626a9f06c4 | ||
|   | 72338eb5b8 | ||
|   | 7bd77c6e99 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 69151b962a | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 86305d4fe4 | ||
|   | d5c3850a3f | ||
|   | 3e645b6175 | ||
|   | 89dc78bc05 | ||
|   | 164c403d05 | ||
|   | 5e8007453f | ||
|   | 0a0d97b084 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | eb604ed92d | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | c47828dbaa | ||
|   | ea437dc745 | ||
|   | c16a208b39 | ||
| ![dependabot-preview[bot]](/assets/img/avatar_default.png)  | 55d803b2a0 | ||
|   | 611f6f2829 | ||
|   | b94df76731 | ||
|   | 218619e7f0 | ||
|   | 273eed901a | ||
|   | 8ea712a937 | ||
|   | 658449a7a0 | ||
|   | 968c471591 | ||
|   | b4665f3907 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 496cee1ec4 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 0f8c80f3ba | ||
|   | 6c28f82239 | ||
|   | def32abb57 | ||
|   | f57a241b9e | ||
|   | 11a7e8b15d | ||
|   | fa4f7697b7 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 6098b7de8e | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 0a382ce54d | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | dd53aaa30c | ||
|   | 31e175a15a | ||
|   | 4c80727bcc | ||
|   | b2c3157361 | ||
|   | dc4f38ebd0 | ||
|   | 7c9437c6ee | ||
|   | 9ce9e10dfd | ||
|   | 4e94043bca | ||
|   | 749d45bf13 | ||
|   | ce99b3e259 | ||
|   | 2c84daefab | ||
|   | dc1933fa88 | ||
|   | 6970cebf80 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a234006de2 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 2484149323 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 778148424c | ||
|   | 55f4a2395e | ||
|   | 5a45d47ed8 | ||
|   | da601d1483 | ||
|   | e98a1272e9 | ||
|   | 90e9cf788b | ||
|   | ec387c3010 | ||
|   | 7e5a960c98 | ||
|   | f1bcbf2416 | ||
|   | bce144e197 | ||
|   | 86a3735d83 | ||
|   | decf254e5f | ||
|   | e10fe16f21 | ||
|   | 996891a740 | ||
|   | 7385d026ea | ||
|   | 09f43d6f3c | ||
|   | 6906e757dd | ||
|   | 963d242afa | ||
|   | 3ed7cbe2ed | ||
|   | 0da924f10b | ||
|   | 76411da0a7 | ||
|   | ce87a72cf0 | ||
|   | f8c9e2f295 | ||
|   | 00af027e51 | ||
|   | c91fce3281 | ||
|   | fb6df18ce9 | ||
|   | 31f5c6f938 | ||
|   | d3a44b2992 | ||
|   | b537a03e6d | ||
|   | 46093379e4 | ||
|   | 1b17d90504 | ||
|   | 7d42dd7ac2 | ||
|   | f35dcfcfd3 | ||
|   | c4f223c38a | ||
|   | 71362f2c76 | ||
|   | 96beac9fd9 | ||
|   | 608c0e5076 | ||
|   | 16ef6d82d2 | ||
|   | 51940222be | ||
|   | 21f3c4820b | ||
|   | 214c6f919e | ||
|   | d9d438d571 | ||
|   | cf60d1f55c | ||
|   | f9aa12cbad | ||
|   | 76266cc18b | ||
|   | 50b9506ff3 | ||
|   | 754cd64213 | ||
|   | 113b62ee77 | ||
|   | d9874c4c3e | ||
|   | ca44e858c5 | ||
|   | c7ca4de307 | ||
|   | b77146a4e0 | ||
|   | 45b4800378 | ||
|   | 7f9232d2b9 | ||
|   | d90426f745 | ||
|   | c2deabb672 | ||
|   | ead5993f3e | ||
|   | 1bcd74e8fa | ||
|   | 118da3c275 | ||
|   | d7bb9013d4 | ||
|   | 812c46d82b | ||
|   | c0462b28cd | ||
|   | 82b2f66920 | ||
|   | 01da42e1b6 | ||
|   | d652d22547 | ||
|   | baea84abe6 | ||
|   | c2d705a42a | ||
|   | f10b433e1f | ||
|   | 67f562a846 | ||
|   | 1edec61133 | ||
|   | c13a33bf71 | ||
|   | 2ae93ae7b1 | ||
|   | 8451020afe | ||
|   | a48e568efc | ||
|   | dee2808cb5 | ||
|   | 06a2ab26a2 | ||
|   | 45de0f2f39 | ||
|   | bac5f704dc | ||
|   | 79669a5d04 | ||
|   | a6e712c9ea | ||
|   | 069fe99699 | ||
|   | 4754f067ad | ||
|   | dce9818812 | ||
|   | d054b6dbb7 | ||
|   | 3093165325 | ||
|   | fd9c5bd412 | ||
|   | 9a8850fecd | ||
|   | b12175ab9a | ||
|   | b52f90187b | ||
|   | 4eb02f474d | ||
|   | dfdcddfd0b | ||
|   | 0391277bad | ||
|   | 73643b9bfe | ||
|   | 93a52b8382 | ||
|   | 7a91bb1f6c | ||
|   | 26efa998a1 | ||
|   | fc9f3fee0a | ||
|   | ec19bd570b | ||
|   | 3335bad9e1 | ||
|   | 71ae334e24 | ||
|   | 0807651fbd | ||
|   | 7026d42d77 | ||
|   | 31047b9ec2 | ||
|   | 714791de8f | ||
|   | c544fff2b2 | ||
|   | fc45670686 | ||
|   | 5cefa0a2ee | ||
|   | a1910d4135 | ||
|   | f1fecdde3a | ||
|   | 9ba4ea7d18 | ||
|   | 58a455d639 | ||
|   | 3ea85f6a28 | ||
|   | 4e1469ada4 | ||
|   | 5778f78f28 | ||
|   | 227125cc0b | ||
|   | b36e178c45 | ||
|   | 32c9198fb2 | ||
|   | 6983dcc267 | ||
|   | 813fcc41f0 | ||
|   | f4e9dd0f1c | ||
|   | 7f074142bf | ||
|   | b6df37628d | ||
|   | 7867eded50 | ||
|   | 311abb8a90 | ||
|   | 21303f4b05 | ||
|   | da3270af67 | ||
|   | 35aae69f23 | ||
|   | 118a2e1951 | ||
|   | 9053341581 | ||
|   | 27532a8a00 | ||
|   | 7fdfa630b5 | ||
|   | 3974d5859f | ||
|   | aa1c765c4b | ||
|   | e78385e7ea | ||
|   | 9d59b56c94 | ||
|   | 9d72dcabfc | ||
|   | a0b5d0b67e | ||
|   | 2b5520405f | ||
|   | ca376b3fcd | ||
|   | 11e3c0c547 | ||
|   | 9da136e037 | ||
|   | 9b3e59d876 | ||
|   | 7a592795b5 | ||
|   | 5b92137699 | ||
|   | 7520cdfeb4 | ||
|   | 0ada791e3a | ||
|   | 73afced4dc | ||
|   | 633a2e93bf | ||
|   | 07c4058a8c | ||
|   | b6f3938b14 | ||
|   | 57534fac96 | ||
|   | 4a03e72983 | ||
|   | ddb29ea9b1 | ||
|   | 95179c30f7 | ||
|   | f49970ce2c | ||
|   | 790818d1aa | ||
|   | 62f675e613 | ||
|   | f33434fb01 | ||
|   | 254d6aee32 | ||
|   | a5ecd597ed | ||
|   | 0fab3e940a | ||
|   | 60fbebc16b | ||
|   | ec366d8112 | ||
|   | b8818788c9 | ||
|   | e23f6f6998 | ||
|   | 05b58d76b9 | ||
|   | 644d13e3fa | ||
|   | 9de71472d4 | ||
|   | bf28227b91 | ||
|   | 4c1ee49068 | ||
|   | 6e7cf5e4c9 | ||
|   | 11f8c97347 | ||
|   | a1461fd518 | ||
|   | fa5c2e37d3 | ||
|   | 1f091b20ad | ||
|   | d3b4a03851 | ||
|   | fb12fee59b | ||
|   | 7a87d2334a | ||
|   | 9591e71138 | ||
|   | cecad526a2 | ||
|   | 53dab4ee45 | ||
|   | 8abbba46c7 | ||
|   | 0f01ac1b59 | ||
|   | aa8ab593c0 | ||
|   | 84f791220e | ||
|   | cee2c5469f | ||
|   | 6e75964a8b | ||
|   | 5ab5036504 | ||
|   | 000a3c1f7e | ||
|   | 8ea123eb94 | ||
|   | 571c42ef7d | ||
|   | 8443da0b9f | ||
|   | 7dbbcf24c8 | ||
|   | 468cb0c36b | ||
|   | 78e093df96 | ||
|   | ec4d7dab21 | ||
|   | d00ee0adea | ||
|   | 55d5ee4ed4 | ||
|   | 0e51d74265 | ||
|   | 916f3caedd | ||
|   | ff80ccce64 | ||
|   | 23f28b38e9 | ||
|   | da425a0530 | ||
|   | 79dca1608e | ||
|   | 33b615e40d | ||
|   | c825c40c4d | ||
|   | 8beb723cc2 | ||
|   | 94fd24c251 | ||
|   | bf75a8a439 | ||
|   | 36cdb05387 | ||
|   | dccc652d42 | ||
|   | 74e03a9a2e | ||
|   | 2f6df3a946 | ||
|   | 2872be6385 | ||
|   | af19e95c81 | ||
|   | e5451973bd | ||
|   | 4ef8c9d633 | ||
|   | 4a9dcb540e | ||
|   | 61eefea358 | ||
|   | f2a5512bbf | ||
|   | 2f4e114f25 | ||
|   | c91bac2527 | ||
|   | 52da7605f5 | ||
|   | 267791833e | ||
|   | 67dcf1563b | ||
|   | ccff0f5b9e | ||
|   | 9f8ad05471 | ||
|   | c2299ef8da | ||
|   | f5845564db | ||
|   | 17904d70d8 | ||
|   | 622e99e04c | ||
|   | 061420f279 | ||
|   | 3d459f1b8b | ||
|   | 5f3dd6190a | ||
|   | ac824d3af6 | ||
|   | dd25c29544 | ||
|   | 5cbdbffbb2 | ||
|   | bb81f14c2c | ||
|   | cecefd6972 | ||
|   | ff7f6a0b4c | ||
|   | 1dc9f35e12 | ||
|   | 051b63c7cc | ||
|   | aac4b9b24a | ||
|   | 1a208a20b6 | ||
|   | b1e8722ead | ||
|   | a66af6e903 | ||
|   | 0c345fc615 | ||
|   | 087b082a6b | ||
|   | 0b85209eae | ||
|   | d81bc7de46 | ||
|   | e3a99b9f89 | ||
|   | 5d319b37ea | ||
|   | 9f25606986 | ||
|   | ecd12732ee | ||
|   | 85fbde8e36 | ||
|   | 6e6c2c3efb | ||
|   | 0d4a808449 | ||
|   | 087f746647 | ||
|   | 640d66ad1a | ||
|   | f5f5ed83af | ||
|   | 95f01a1161 | ||
|   | b84e7e7d94 | ||
|   | 5d7018f3f0 | ||
|   | d87a85ceb5 | ||
|   | 9ab6e80b6f | ||
|   | 78e91e859e | ||
|   | 9eee8eade6 | ||
|   | 124ce0b8b7 | ||
|   | 00e7d96472 | ||
|   | 398815efd8 | ||
|   | bdc2bdcf56 | ||
|   | 68eafb0a7d | ||
|   | 7ca2fd7193 | ||
|   | ec823edd8f | ||
|   | 858c7a1fa7 | ||
|   | 6ac45a24fc | ||
|   | 9430b39042 | ||
|   | ae7466ccfe | ||
|   | 2c17fe5da8 | ||
|   | a0fb91af29 | ||
|   | f626e31fd3 | ||
|   | 0151a149fd | ||
|   | 9dea93142b | ||
|   | 7f878bfac0 | ||
|   | ebe9ae2341 | ||
|   | e777bbd024 | ||
|   | 2116d56124 | ||
|   | 0b6a82b018 | ||
|   | b4ea28af4e | ||
|   | 22f59712df | ||
|   | efe95f7bab | ||
|   | 200c68f67f | ||
|   | dcefec7b99 | ||
|   | 5db798bcf8 | ||
|   | 70005296cc | ||
|   | f2bf8dea93 | ||
|   | fee858c956 | ||
|   | e3ae48c8ff | ||
|   | fa9e20385e | ||
|   | f51c9704e0 | ||
|   | 57c58d81c0 | ||
|   | 1ec1082068 | ||
|   | 35b7c2269c | ||
|   | cc3e6ec6fd | ||
|   | 4df42e054d | ||
|   | 1b481e0b37 | ||
|   | 3aa4cdf540 | ||
|   | 029f277945 | ||
|   | e7e0b9adda | ||
|   | 5fbff75da8 | ||
|   | 58299a0389 | ||
|   | 1151d7e17b | ||
|   | b56ed547e3 | ||
|   | a71ebba940 | ||
|   | 4fcb516c75 | ||
|   | 22142d32d2 | ||
|   | 21194f1411 | ||
|   | 09df046fa8 | ||
|   | 63d3889d5c | ||
|   | 0ffc0559e2 | ||
|   | 78118a502c | ||
|   | 946cc3d618 | ||
|   | c40a3f18e9 | ||
|   | f01945bf8c | ||
|   | 0f72db45f9 | ||
|   | 83510341b6 | ||
|   | 70dd6593e4 | ||
|   | 60ba2db561 | ||
|   | 5820d16419 | ||
|   | 9f9ff0d1ad | ||
|   | 806161e3ac | ||
|   | 44ae9c7b63 | ||
|   | 75d24ba534 | ||
|   | 13243cd02c | ||
|   | 411fad8a45 | ||
|   | 5fe9d63c79 | ||
|   | 33095f8792 | ||
|   | 0253722369 | ||
|   | 495c45564a | ||
|   | 8517b43e85 | ||
|   | 033ea4e7dc | ||
|   | a0c9e5ad26 | ||
|   | 408d6eafcc | ||
|   | 054e357483 | ||
|   | cb520bff23 | ||
|   | 024ebe0026 | ||
|   | 7b62e2f07b | ||
|   | 7d52b3ba01 | ||
|   | 46caa23319 | ||
|   | 9aa5eda2c8 | ||
|   | f48182a69c | ||
|   | 788f883490 | ||
|   | e84e82d018 | ||
|   | 20e73796b8 | ||
|   | 7769d6fff1 | ||
|   | 561e80c2be | ||
|   | 96f47a4c32 | ||
|   | 7482d6dd45 | ||
|   | aea31ee6dd | ||
|   | de43965ecb | ||
|   | baa61c6aa0 | ||
|   | cb22dafb3c | ||
|   | ea26784c3e | ||
|   | 72332ed40f | ||
|   | 46f2bf16a8 | ||
|   | e2725f8033 | ||
|   | 9084ac119f | ||
|   | 41943ba61a | ||
|   | 33794669a1 | ||
|   | fe155a4ff0 | ||
|   | 124e487ef7 | ||
|   | f361916a60 | ||
|   | 20afa1544b | ||
|   | c08d5af4db | ||
|   | dc341c8af8 | ||
|   | 2507b52adb | ||
|   | 1302708135 | ||
|   | 1314812f92 | ||
|   | f739e3ed11 | ||
|   | abb526fc0f | ||
|   | efb1a24b8f | ||
|   | bc0835963d | ||
|   | 316190dff8 | ||
|   | 029ead0c7c | ||
|   | a85172f30b | ||
|   | dfe2532813 | ||
|   | cf3bb23629 | ||
|   | 2132042aca | ||
|   | 19e448fc54 | ||
|   | a4e0fb8e99 | ||
|   | 5b72e2887e | ||
|   | d2b6ec1b7e | ||
|   | 4b541a23c4 | ||
|   | 99869449ae | ||
|   | eab73f3895 | ||
|   | 9e96615ffa | ||
|   | 350010feb5 | ||
|   | 7395e4620b | ||
|   | 7d91ae4513 | ||
|   | 343f759983 | ||
|   | 24ee3f8cc0 | ||
|   | c143eadb62 | ||
|   | e7df38f4d1 | ||
|   | 3e42318ac8 | ||
|   | c6e5d2932e | ||
|   | 1aaf21a350 | ||
|   | f185eece8a | ||
|   | 9d951280ef | ||
|   | 3f598bafc0 | ||
|   | cddd859f56 | ||
|   | e7adf50ec1 | ||
|   | ac437f809a | ||
|   | f13dee9b9d | ||
|   | 00855c0909 | ||
|   | 1fafed5a07 | ||
|   | 7adb81b350 | ||
|   | 4647035b00 | ||
|   | 8ad7344e02 | ||
|   | f1c46b3385 | ||
|   | 7f84073b12 | ||
|   | e383a11bb7 | ||
|   | cc113e2251 | ||
|   | c5a3830c7d | ||
|   | a2abadc970 | ||
|   | db444b89d3 | ||
|   | 77881e8a58 | ||
|   | 0b15f88da3 | ||
|   | 7c6bf96f6f | ||
|   | dc77e2d8d9 | ||
|   | 68824fab4f | ||
|   | d6b3a36714 | ||
|   | 8ab1f703c7 | ||
|   | 95a4e292aa | ||
|   | 3b9252558f | ||
|   | 4a324dccc6 | ||
|   | 8fffb0f8b5 | ||
|   | 87adfce211 | ||
|   | 297813f6e6 | ||
|   | 362315852a | ||
|   | d221f36cf8 | ||
|   | 9e18589b6b | ||
|   | c4d09210e1 | ||
|   | 43797c5eb5 | ||
|   | fe38fe94dc | ||
|   | f185291eca | ||
|   | 7541ae6476 | ||
|   | d94715be2b | ||
|   | 99cc5972c8 | ||
|   | 3d101a24a1 | ||
|   | 2ed3ddf05b | ||
|   | 10b3658bd7 | ||
|   | 9f5903089e | ||
|   | 0593885ed4 | ||
|   | 3efbe11d49 | ||
|   | 1c2e0e5749 | ||
|   | f64da6a547 | ||
|   | 94fba7e175 | ||
|   | a59245e6bb | ||
|   | 217c1acc62 | ||
|   | 2c0a68bd8f | ||
|   | e37ffd6107 | ||
|   | 3bde598fa7 | ||
|   | 53f42ff934 | ||
|   | 9041eb9e9a | ||
|   | 70ac395232 | ||
|   | 82f68b4a7b | ||
|   | 2b2f3214e9 | ||
|   | 1c0d63a02e | ||
|   | de77215630 | ||
|   | f300b843c1 | ||
|   | 0bb81136bb | ||
|   | 2a81ced817 | ||
|   | 7363951a9a | ||
|   | 6f770b78af | ||
|   | 10219a348f | ||
|   | 23d1013cfa | ||
|   | 05980d4147 | ||
|   | e5e25c895f | ||
|   | b486883ff6 | ||
|   | 42dd4d9557 | ||
|   | 7dff9e09a7 | ||
|   | c315b026a3 | ||
|   | a4ba4c80e8 | ||
|   | ccd48b63a2 | ||
|   | 6d5f70ced6 | ||
|   | ccffb4b786 | ||
|   | 68dbbe212c | ||
|   | 5df869e08a | ||
|   | 63b9e023b4 | ||
|   | 8f357739ec | ||
|   | 808fc0f8b6 | ||
|   | 1a6f6085e6 | ||
|   | 0de3e9a233 | ||
|   | f1237f124f | ||
|   | 69142b6fb0 | ||
|   | 28f295a1e2 | ||
|   | 55c2127baa | ||
|   | 265c36b345 | ||
|   | 9f081fe32f | ||
|   | e4fb6ad727 | ||
|   | 1040a1624a | ||
|   | a2ee2852a0 | ||
|   | b2e3b726d9 | ||
|   | 0f4e557552 | ||
|   | 2efa9f9483 | ||
|   | 43e6ca8f4a | ||
|   | 34d67a7bcd | ||
|   | 5a6051f9a1 | ||
|   | 157e48f946 | ||
|   | 9469a258ff | ||
|   | fd0aeb5341 | ||
|   | 4d4a4ce043 | ||
|   | 678f77cc05 | ||
|   | 6c30248389 | ||
|   | fda7c1cf11 | ||
|   | 364e5ec0b8 | ||
|   | 947bf7799c | ||
|   | e22836d706 | ||
|   | 6c8fcbfb80 | ||
|   | f1fe1877fe | ||
|   | 3c0831c8eb | ||
|   | 35b3f364c9 | ||
|   | c4299b51cd | ||
|   | 31caed20fa | ||
|   | 41fed656c1 | ||
|   | c5ee2ebc49 | ||
|   | 743a218219 | ||
|   | 093ef17fb7 | ||
|   | a41912be0a | ||
|   | 5becd51b50 | ||
|   | ef7a375396 | ||
|   | 19879e3287 | ||
|   | d1c4f342fc | ||
|   | 2f62b7046c | ||
|   | 0cca8f522b | ||
|   | 39decec001 | ||
|   | 3489db2768 | ||
|   | 3382688669 | ||
|   | cf00ce7d78 | ||
|   | 2c714aa003 | ||
|   | 1e7858bf06 | ||
|   | 4e428c2e41 | ||
|   | b95ab3e95a | ||
|   | 0dd7f8fbaa | ||
|   | a2789ac540 | ||
|   | a785e10a3f | ||
|   | 10dad5a209 | ||
|   | 9327b24d44 | ||
|   | 7d02bb2fe9 | ||
|   | a2d3ee0d67 | ||
|   | d29fab69e8 | ||
|   | 6205f40298 | ||
|   | 6b169f3f17 | ||
|   | 0d4a5a7ffb | ||
|   | dac90d29dd | ||
|   | 7e815633e7 | ||
|   | f062f31ca2 | ||
|   | 1374f90433 | ||
|   | b692b19a4d | ||
|   | 92d5b14cf5 | ||
|   | 6a84829c16 | ||
|   | 7036ecbd0a | ||
|   | 19b5059972 | ||
|   | cebc377fa7 | ||
|   | d36c3919d7 | ||
|   | 0684427373 | ||
|   | 8ff79e85bf | ||
|   | ee4b28a490 | ||
|   | fddd5b8860 | ||
|   | 72279072ac | ||
|   | 0b70448273 | ||
|   | 4eb24fcbc5 | ||
|   | 06edf59d14 | ||
|   | 36ca851bc2 | ||
|   | a4e453bf83 | ||
|   | d211eec66f | ||
|   | db8540d4ab | ||
|   | 30e270e7c0 | ||
|   | 9734307551 | ||
|   | c650f8d1e1 | ||
|   | 10005898f8 | ||
|   | 716389e0c1 | ||
|   | 658729feb5 | ||
|   | ae7808eb2a | ||
|   | d8e0e9e0b0 | ||
|   | a860a3c122 | ||
|   | fe60d526b9 | ||
|   | 769904778f | ||
|   | a3a40c79d6 | ||
|   | b44f613136 | ||
|   | 801be9c60b | ||
|   | b6db6a1287 | ||
|   | 4181174bcc | ||
|   | 3be46e6011 | ||
|   | 98b93efc5c | ||
|   | 6156019c2f | ||
|   | 80d60148a9 | ||
|   | 8baf59a608 | ||
|   | b546365aaa | ||
|   | 0a68698912 | ||
|   | 45288a2491 | ||
|   | f34a175e4f | ||
|   | 6e7e145822 | ||
|   | 9abebe2d5d | ||
|   | b0c5884c3f | ||
|   | a79e6a8eea | ||
|   | c1f1aed9ca | ||
|   | 65b0e17b5b | ||
|   | 6947131b47 | ||
|   | 914dd53da0 | ||
|   | 58616ef686 | ||
|   | 563e0c1e0e | ||
|   | 437070fd7a | ||
|   | baa9cf451c | ||
|   | c2918d4519 | ||
|   | 1efdcd4691 | ||
|   | 2a43087ed7 | ||
|   | 5716324934 | ||
|   | ae267e0380 | ||
|   | 3918a2a228 | ||
|   | e375fc36d3 | ||
|   | f5e29b4651 | ||
|   | 524d875516 | ||
|   | 60bdc00ce9 | ||
|   | 073166190f | ||
|   | b80e4d7d70 | ||
|   | cc434e27cf | ||
|   | 8377e04b62 | ||
|   | 0a47fb9c83 | ||
|   | a5d3c850e9 | ||
|   | d6391f62be | ||
|   | c6f302e448 | ||
|   | 9706022c21 | ||
|   | 1d858f4920 | ||
|   | e09ba30d46 | ||
|   | 38ec3d14ed | ||
|   | 8ee9380cc7 | ||
|   | 6e74e4c008 | ||
|   | 5ebc58851b | ||
|   | 16b09bbfc5 | ||
|   | d4b5fc79f4 | ||
|   | e51c044ccd | ||
|   | d3b1ba81f7 | ||
|   | 26f55f02c0 | ||
|   | 8050707ff9 | ||
|   | 46252030cf | ||
|   | 681fa835ef | ||
|   | d6560eb976 | ||
|   | 3770b307af | ||
|   | 0dacbb31be | ||
|   | bbdbd756a7 | ||
|   | 508e38e622 | ||
|   | ffe45d0d02 | ||
|   | 9206d1acf8 | ||
|   | da867ef8ef | ||
|   | 4826201e51 | ||
|   | 463c97f9e7 | ||
|   | 3983928c6c | ||
|   | 15e626027f | ||
|   | d46810752e | ||
|   | 3d10b502a0 | ||
|   | 433c5cef3b | ||
|   | 697caf553a | ||
|   | 1e11359c71 | ||
|   | 5285431825 | ||
|   | 7743a572a9 | ||
|   | 3b974920d3 | ||
|   | 6bc9792248 | ||
|   | da55f6fb10 | ||
|   | ffa90a3407 | ||
|   | 0a13ea3743 | ||
|   | 0e2e588145 | ||
|   | b8c50fee36 | ||
|   | 8cb0b7c498 | ||
|   | 699fcdafba | ||
|   | b4d5aeb5d0 | ||
|   | d067dd643e | ||
|   | 65a2bf2d18 | ||
|   | e826e8184f | ||
|   | dacbde7d77 | ||
|   | 5b0587b672 | ||
|   | f0320c0f6d | ||
|   | e05c32df25 | ||
|   | 9c40c32e95 | ||
|   | ac60de0360 | ||
|   | 587047f9d6 | ||
|   | e815223047 | ||
|   | b6fb5ab950 | ||
|   | a0906937c4 | ||
|   | 07c47df369 | ||
|   | 85e9a949cc | ||
|   | 3933fb0664 | ||
|   | a885fbdb41 | ||
|   | 210793eb34 | ||
|   | 0235c7bce0 | ||
|   | 4419c0fc6c | ||
|   | 2f3701693d | ||
|   | 3bf446cbdb | ||
|   | 0c67cc13a1 | ||
|   | 0b80d7b6f4 | ||
|   | 23c35d4c80 | ||
|   | e939c29efa | ||
|   | ea0655b4e5 | ||
|   | 4117ce2e86 | ||
|   | dec04386bf | ||
|   | b50756785e | ||
|   | b9538bdc67 | ||
|   | a928281bbe | ||
|   | 4533d17e27 | ||
|   | 546df6d001 | ||
|   | f14eef62ae | ||
|   | ee86770570 | ||
|   | 385a4e9f6f | ||
|   | 142cdcffca | ||
|   | eb6c753514 | ||
|   | c3b62c80fb | ||
|   | f77e176a6e | ||
|   | 3f99dec858 | ||
|   | 81b0cf55b0 | ||
|   | 1d5d2dc731 | ||
|   | 04f5ee0a80 | ||
|   | 7a02777cfb | ||
|   | 7257c44d27 | ||
|   | cb15602814 | ||
|   | 0f2c333484 | ||
|   | 6f2cf2ef85 | ||
|   | 70a721a47d | ||
|   | b32947af98 | ||
|   | 94b44ec7fe | ||
|   | 5c8aa71c31 | ||
|   | a6c424b7c8 | ||
|   | 38e40c342d | ||
|   | 26d390b66e | ||
|   | baddafa552 | ||
|   | f443d3052b | ||
|   | 8fc27ff28e | ||
|   | 3784d759f5 | ||
|   | 61037f3852 | ||
|   | db8aaecdbe | ||
|   | 15a4541595 | ||
|   | 50ae8e2335 | ||
|   | 279df17ba4 | ||
|   | f8e6362283 | ||
|   | 0c44064926 | ||
|   | 73c437574c | ||
|   | 69a2182c04 | ||
|   | ce80e6cd32 | ||
|   | 054def09f7 | ||
|   | eebe90bd14 | ||
|   | 6ea280ce60 | ||
|   | e992b70f92 | ||
|   | 0f58bb35ba | ||
|   | 56abfb6adc | ||
|   | 8352d61f8d | ||
|   | 51d585f299 | ||
|   | d017a52922 | ||
|   | 78ec0d1314 | ||
|   | c84151e9e8 | ||
|   | e8e599cb8c | ||
|   | 232b9ea239 | ||
|   | 1c49351e66 | ||
|   | 34d1f4725d | ||
|   | 7cd81dcc95 | ||
|   | 1bdd3d88de | ||
|   | d105552fa9 | ||
|   | b5af35bd6c | ||
|   | 7d46487491 | ||
|   | 38a599011e | ||
|   | e59e2fc8d7 | ||
|   | b9ce405ada | ||
|   | d7df423deb | ||
|   | 99eea99e93 | ||
|   | 63d82ce03e | ||
|   | 13a2c1ecd9 | ||
|   | 627ab4ee81 | ||
|   | 54f45539be | 
							
								
								
									
										51
									
								
								.devcontainer/Dockerfile
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								.devcontainer/Dockerfile
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | |||||||
|  | FROM python:3.7 | ||||||
|  |  | ||||||
|  | WORKDIR /workspaces | ||||||
|  |  | ||||||
|  | # Install Node/Yarn for Frontent | ||||||
|  | RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||||
|  |         curl \ | ||||||
|  |         git \ | ||||||
|  |         apt-utils \ | ||||||
|  |         apt-transport-https \ | ||||||
|  |     && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \ | ||||||
|  |     && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \ | ||||||
|  |     && apt-get update && apt-get install -y --no-install-recommends \ | ||||||
|  |         nodejs \ | ||||||
|  |         yarn \ | ||||||
|  |     && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|  | ENV NVM_DIR /root/.nvm | ||||||
|  |  | ||||||
|  | # Install docker | ||||||
|  | # https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/ | ||||||
|  | RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||||
|  |         apt-transport-https \ | ||||||
|  |         ca-certificates \ | ||||||
|  |         curl \ | ||||||
|  |         software-properties-common \ | ||||||
|  |         gpg-agent \ | ||||||
|  |     && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \ | ||||||
|  |     && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \ | ||||||
|  |     && apt-get update && apt-get install -y --no-install-recommends \ | ||||||
|  |         docker-ce \ | ||||||
|  |         docker-ce-cli \ | ||||||
|  |         containerd.io \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|  |  | ||||||
|  | # Install tools | ||||||
|  | RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||||
|  |         jq \ | ||||||
|  |         dbus \ | ||||||
|  |         network-manager \ | ||||||
|  |         libpulse0 \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|  |  | ||||||
|  | # Install Python dependencies from requirements.txt if it exists | ||||||
|  | COPY requirements.txt requirements_tests.txt ./ | ||||||
|  | RUN pip3 install -r requirements.txt -r requirements_tests.txt \ | ||||||
|  |     && pip3 install tox \ | ||||||
|  |     && rm -f requirements.txt requirements_tests.txt | ||||||
|  |  | ||||||
|  | # Set the default shell to bash instead of sh | ||||||
|  | ENV SHELL /bin/bash | ||||||
							
								
								
									
										24
									
								
								.devcontainer/devcontainer.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										24
									
								
								.devcontainer/devcontainer.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,24 @@ | |||||||
|  | // See https://aka.ms/vscode-remote/devcontainer.json for format details. | ||||||
|  | { | ||||||
|  |   "name": "Supervisor dev", | ||||||
|  |   "context": "..", | ||||||
|  |   "dockerFile": "Dockerfile", | ||||||
|  |   "appPort": "9123:8123", | ||||||
|  |   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"], | ||||||
|  |   "extensions": [ | ||||||
|  |     "ms-python.python", | ||||||
|  |     "visualstudioexptteam.vscodeintellicode", | ||||||
|  |     "esbenp.prettier-vscode" | ||||||
|  |   ], | ||||||
|  |   "settings": { | ||||||
|  |     "python.pythonPath": "/usr/local/bin/python", | ||||||
|  |     "python.linting.pylintEnabled": true, | ||||||
|  |     "python.linting.enabled": true, | ||||||
|  |     "python.formatting.provider": "black", | ||||||
|  |     "python.formatting.blackArgs": ["--target-version", "py37"], | ||||||
|  |     "editor.formatOnPaste": false, | ||||||
|  |     "editor.formatOnSave": true, | ||||||
|  |     "editor.formatOnType": true, | ||||||
|  |     "files.trimTrailingWhitespace": true | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -1,9 +1,23 @@ | |||||||
| # General files | # General files | ||||||
| .git | .git | ||||||
| .github | .github | ||||||
|  | .devcontainer | ||||||
|  | .vscode | ||||||
|  |  | ||||||
| # Test related files | # Test related files | ||||||
| .tox | .tox | ||||||
|  |  | ||||||
| # Temporary files | # Temporary files | ||||||
| **/__pycache__ | **/__pycache__ | ||||||
|  | .pytest_cache | ||||||
|  |  | ||||||
|  | # virtualenv | ||||||
|  | venv/ | ||||||
|  |  | ||||||
|  | # Data | ||||||
|  | home-assistant-polymer/ | ||||||
|  | script/ | ||||||
|  | tests/ | ||||||
|  |  | ||||||
|  | # Test ENV | ||||||
|  | data/ | ||||||
|   | |||||||
							
								
								
									
										29
									
								
								.github/ISSUE_TEMPLATE.md
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								.github/ISSUE_TEMPLATE.md
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | |||||||
|  | <!-- READ THIS FIRST: | ||||||
|  | - If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/ | ||||||
|  | - Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases | ||||||
|  | - Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues | ||||||
|  | - This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests | ||||||
|  | - Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template! | ||||||
|  | - If you have a problem with a Add-on, make a issue on there repository. | ||||||
|  | --> | ||||||
|  |  | ||||||
|  | **Home Assistant release with the issue:** | ||||||
|  | <!-- | ||||||
|  | - Frontend -> Developer tools -> Info | ||||||
|  | - Or use this command: hass --version | ||||||
|  | --> | ||||||
|  |  | ||||||
|  | **Operating environment (HassOS/Generic):** | ||||||
|  | <!-- | ||||||
|  | Please provide details about your environment. | ||||||
|  | --> | ||||||
|  |  | ||||||
|  | **Supervisor logs:** | ||||||
|  | <!-- | ||||||
|  | - Frontend -> Hass.io -> System | ||||||
|  | - Or use this command: hassio su logs | ||||||
|  | --> | ||||||
|  |  | ||||||
|  |  | ||||||
|  | **Description of problem:** | ||||||
|  |  | ||||||
							
								
								
									
										27
									
								
								.github/lock.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										27
									
								
								.github/lock.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,27 @@ | |||||||
|  | # Configuration for Lock Threads - https://github.com/dessant/lock-threads | ||||||
|  |  | ||||||
|  | # Number of days of inactivity before a closed issue or pull request is locked | ||||||
|  | daysUntilLock: 1 | ||||||
|  |  | ||||||
|  | # Skip issues and pull requests created before a given timestamp. Timestamp must | ||||||
|  | # follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable | ||||||
|  | skipCreatedBefore: 2020-01-01 | ||||||
|  |  | ||||||
|  | # Issues and pull requests with these labels will be ignored. Set to `[]` to disable | ||||||
|  | exemptLabels: [] | ||||||
|  |  | ||||||
|  | # Label to add before locking, such as `outdated`. Set to `false` to disable | ||||||
|  | lockLabel: false | ||||||
|  |  | ||||||
|  | # Comment to post before locking. Set to `false` to disable | ||||||
|  | lockComment: false | ||||||
|  |  | ||||||
|  | # Assign `resolved` as the reason for locking. Set to `false` to disable | ||||||
|  | setLockReason: false | ||||||
|  |  | ||||||
|  | # Limit to only `issues` or `pulls` | ||||||
|  | only: pulls | ||||||
|  |  | ||||||
|  | # Optionally, specify configuration settings just for `issues` or `pulls` | ||||||
|  | issues: | ||||||
|  |    daysUntilLock: 30 | ||||||
							
								
								
									
										13
									
								
								.github/move.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								.github/move.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | |||||||
|  | # Configuration for move-issues - https://github.com/dessant/move-issues | ||||||
|  |  | ||||||
|  | # Delete the command comment. Ignored when the comment also contains other content | ||||||
|  | deleteCommand: true | ||||||
|  | # Close the source issue after moving | ||||||
|  | closeSourceIssue: true | ||||||
|  | # Lock the source issue after moving | ||||||
|  | lockSourceIssue: false | ||||||
|  | # Set custom aliases for targets | ||||||
|  | # aliases: | ||||||
|  | #   r: repo | ||||||
|  | #   or: owner/repo | ||||||
|  |  | ||||||
							
								
								
									
										4
									
								
								.github/release-drafter.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										4
									
								
								.github/release-drafter.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,4 @@ | |||||||
|  | template: | | ||||||
|  |   ## What's Changed | ||||||
|  |  | ||||||
|  |   $CHANGES | ||||||
							
								
								
									
										17
									
								
								.github/stale.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										17
									
								
								.github/stale.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,17 @@ | |||||||
|  | # Number of days of inactivity before an issue becomes stale | ||||||
|  | daysUntilStale: 60 | ||||||
|  | # Number of days of inactivity before a stale issue is closed | ||||||
|  | daysUntilClose: 7 | ||||||
|  | # Issues with these labels will never be considered stale | ||||||
|  | exemptLabels: | ||||||
|  |   - pinned | ||||||
|  |   - security | ||||||
|  | # Label to use when marking an issue as stale | ||||||
|  | staleLabel: wontfix | ||||||
|  | # Comment to post when marking an issue as stale. Set to `false` to disable | ||||||
|  | markComment: > | ||||||
|  |   This issue has been automatically marked as stale because it has not had | ||||||
|  |   recent activity. It will be closed if no further activity occurs. Thank you | ||||||
|  |   for your contributions. | ||||||
|  | # Comment to post when closing a stale issue. Set to `false` to disable | ||||||
|  | closeComment: false | ||||||
							
								
								
									
										6
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -90,3 +90,9 @@ ENV/ | |||||||
|  |  | ||||||
| # pylint | # pylint | ||||||
| .pylint.d/ | .pylint.d/ | ||||||
|  |  | ||||||
|  | # VS Code | ||||||
|  | .vscode/* | ||||||
|  | !.vscode/cSpell.json | ||||||
|  | !.vscode/tasks.json | ||||||
|  | !.vscode/launch.json | ||||||
|   | |||||||
							
								
								
									
										1
									
								
								.gitmodules
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.gitmodules
									
									
									
									
										vendored
									
									
								
							| @@ -1,3 +1,4 @@ | |||||||
| [submodule "home-assistant-polymer"] | [submodule "home-assistant-polymer"] | ||||||
| 	path = home-assistant-polymer | 	path = home-assistant-polymer | ||||||
| 	url = https://github.com/home-assistant/home-assistant-polymer | 	url = https://github.com/home-assistant/home-assistant-polymer | ||||||
|  | 	branch = dev | ||||||
|   | |||||||
							
								
								
									
										5
									
								
								.hadolint.yaml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								.hadolint.yaml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | |||||||
|  | ignored: | ||||||
|  |   - DL3018 | ||||||
|  |   - DL3006 | ||||||
|  |   - DL3013 | ||||||
|  |   - SC2155 | ||||||
							
								
								
									
										12
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										12
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,12 +0,0 @@ | |||||||
| sudo: false |  | ||||||
| matrix: |  | ||||||
|   fast_finish: true |  | ||||||
|   include: |  | ||||||
|     - python: "3.6" |  | ||||||
|  |  | ||||||
| cache: |  | ||||||
|   directories: |  | ||||||
|     - $HOME/.cache/pip |  | ||||||
| install: pip install -U tox |  | ||||||
| language: python |  | ||||||
| script: tox |  | ||||||
							
								
								
									
										18
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,18 @@ | |||||||
|  | { | ||||||
|  |   "version": "0.2.0", | ||||||
|  |   "configurations": [ | ||||||
|  |     { | ||||||
|  |       "name": "Supervisor remote debug", | ||||||
|  |       "type": "python", | ||||||
|  |       "request": "attach", | ||||||
|  |       "port": 33333, | ||||||
|  |       "host": "172.30.32.2", | ||||||
|  |       "pathMappings": [ | ||||||
|  |         { | ||||||
|  |           "localRoot": "${workspaceFolder}", | ||||||
|  |           "remoteRoot": "/usr/src/supervisor" | ||||||
|  |         } | ||||||
|  |       ] | ||||||
|  |     } | ||||||
|  |   ] | ||||||
|  | } | ||||||
							
								
								
									
										90
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										90
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,90 @@ | |||||||
|  | { | ||||||
|  |   "version": "2.0.0", | ||||||
|  |   "tasks": [ | ||||||
|  |     { | ||||||
|  |       "label": "Run Testenv", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "./scripts/test_env.sh", | ||||||
|  |       "group": { | ||||||
|  |         "kind": "test", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "label": "Run Testenv CLI", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "docker exec -ti hassio_cli /usr/bin/cli.sh", | ||||||
|  |       "group": { | ||||||
|  |         "kind": "test", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "label": "Update UI", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "./scripts/update-frontend.sh", | ||||||
|  |       "group": { | ||||||
|  |         "kind": "build", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "label": "Pytest", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "pytest --timeout=10 tests", | ||||||
|  |       "group": { | ||||||
|  |         "kind": "test", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "label": "Flake8", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "flake8 hassio tests", | ||||||
|  |       "group": { | ||||||
|  |         "kind": "test", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "label": "Pylint", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "pylint hassio", | ||||||
|  |       "dependsOn": ["Install all Requirements"], | ||||||
|  |       "group": { | ||||||
|  |         "kind": "test", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     } | ||||||
|  |   ] | ||||||
|  | } | ||||||
							
								
								
									
										53
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										53
									
								
								Dockerfile
									
									
									
									
									
								
							| @@ -1,27 +1,40 @@ | |||||||
| ARG BUILD_FROM | ARG BUILD_FROM | ||||||
| FROM $BUILD_FROM | FROM $BUILD_FROM | ||||||
|  |  | ||||||
| # Add env | ENV \ | ||||||
| ENV LANG C.UTF-8 |     S6_SERVICES_GRACETIME=10000 | ||||||
|  |  | ||||||
| # Setup base | # Install base | ||||||
| RUN apk add --no-cache \ | RUN \ | ||||||
|         python3 \ |     apk add --no-cache \ | ||||||
|  |         eudev \ | ||||||
|  |         eudev-libs \ | ||||||
|         git \ |         git \ | ||||||
|         socat \ |         glib \ | ||||||
|         libstdc++ \ |         libffi \ | ||||||
|     && apk add --no-cache --virtual .build-dependencies \ |         libpulse \ | ||||||
|         make \ |         musl \ | ||||||
|         python3-dev \ |         openssl \ | ||||||
|         g++ \ |         socat | ||||||
|     && pip3 install --no-cache-dir \ |  | ||||||
|         uvloop \ |  | ||||||
|         cchardet \ |  | ||||||
|     && apk del .build-dependencies |  | ||||||
|  |  | ||||||
| # Install HassIO | ARG BUILD_ARCH | ||||||
| COPY . /usr/src/hassio | WORKDIR /usr/src | ||||||
| RUN pip3 install --no-cache-dir /usr/src/hassio \ |  | ||||||
|     && rm -rf /usr/src/hassio |  | ||||||
|  |  | ||||||
| CMD [ "python3", "-m", "hassio" ] | # Install requirements | ||||||
|  | COPY requirements.txt . | ||||||
|  | RUN \ | ||||||
|  |     export MAKEFLAGS="-j$(nproc)" \ | ||||||
|  |     && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \ | ||||||
|  |         "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \ | ||||||
|  |         -r ./requirements.txt \ | ||||||
|  |     && rm -f requirements.txt | ||||||
|  |  | ||||||
|  | # Install Home Assistant Supervisor | ||||||
|  | COPY . supervisor | ||||||
|  | RUN \ | ||||||
|  |     pip3 install --no-cache-dir -e ./supervisor \ | ||||||
|  |     && python3 -m compileall ./supervisor/supervisor | ||||||
|  |  | ||||||
|  |  | ||||||
|  | WORKDIR / | ||||||
|  | COPY rootfs / | ||||||
|   | |||||||
							
								
								
									
										811
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										811
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,201 +1,674 @@ | |||||||
|                                  Apache License |                     GNU GENERAL PUBLIC LICENSE | ||||||
|                            Version 2.0, January 2004 |                        Version 3, 29 June 2007 | ||||||
|                         http://www.apache.org/licenses/ |  | ||||||
|  |  | ||||||
|    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION |  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> | ||||||
|  |  Everyone is permitted to copy and distribute verbatim copies | ||||||
|  |  of this license document, but changing it is not allowed. | ||||||
|  |  | ||||||
|    1. Definitions. |                             Preamble | ||||||
|  |  | ||||||
|       "License" shall mean the terms and conditions for use, reproduction, |   The GNU General Public License is a free, copyleft license for | ||||||
|       and distribution as defined by Sections 1 through 9 of this document. | software and other kinds of works. | ||||||
|  |  | ||||||
|       "Licensor" shall mean the copyright owner or entity authorized by |   The licenses for most software and other practical works are designed | ||||||
|       the copyright owner that is granting the License. | to take away your freedom to share and change the works.  By contrast, | ||||||
|  | the GNU General Public License is intended to guarantee your freedom to | ||||||
|  | share and change all versions of a program--to make sure it remains free | ||||||
|  | software for all its users.  We, the Free Software Foundation, use the | ||||||
|  | GNU General Public License for most of our software; it applies also to | ||||||
|  | any other work released this way by its authors.  You can apply it to | ||||||
|  | your programs, too. | ||||||
|  |  | ||||||
|       "Legal Entity" shall mean the union of the acting entity and all |   When we speak of free software, we are referring to freedom, not | ||||||
|       other entities that control, are controlled by, or are under common | price.  Our General Public Licenses are designed to make sure that you | ||||||
|       control with that entity. For the purposes of this definition, | have the freedom to distribute copies of free software (and charge for | ||||||
|       "control" means (i) the power, direct or indirect, to cause the | them if you wish), that you receive source code or can get it if you | ||||||
|       direction or management of such entity, whether by contract or | want it, that you can change the software or use pieces of it in new | ||||||
|       otherwise, or (ii) ownership of fifty percent (50%) or more of the | free programs, and that you know you can do these things. | ||||||
|       outstanding shares, or (iii) beneficial ownership of such entity. |  | ||||||
|  |  | ||||||
|       "You" (or "Your") shall mean an individual or Legal Entity |   To protect your rights, we need to prevent others from denying you | ||||||
|       exercising permissions granted by this License. | these rights or asking you to surrender the rights.  Therefore, you have | ||||||
|  | certain responsibilities if you distribute copies of the software, or if | ||||||
|  | you modify it: responsibilities to respect the freedom of others. | ||||||
|  |  | ||||||
|       "Source" form shall mean the preferred form for making modifications, |   For example, if you distribute copies of such a program, whether | ||||||
|       including but not limited to software source code, documentation | gratis or for a fee, you must pass on to the recipients the same | ||||||
|       source, and configuration files. | freedoms that you received.  You must make sure that they, too, receive | ||||||
|  | or can get the source code.  And you must show them these terms so they | ||||||
|  | know their rights. | ||||||
|  |  | ||||||
|       "Object" form shall mean any form resulting from mechanical |   Developers that use the GNU GPL protect your rights with two steps: | ||||||
|       transformation or translation of a Source form, including but | (1) assert copyright on the software, and (2) offer you this License | ||||||
|       not limited to compiled object code, generated documentation, | giving you legal permission to copy, distribute and/or modify it. | ||||||
|       and conversions to other media types. |  | ||||||
|  |  | ||||||
|       "Work" shall mean the work of authorship, whether in Source or |   For the developers' and authors' protection, the GPL clearly explains | ||||||
|       Object form, made available under the License, as indicated by a | that there is no warranty for this free software.  For both users' and | ||||||
|       copyright notice that is included in or attached to the work | authors' sake, the GPL requires that modified versions be marked as | ||||||
|       (an example is provided in the Appendix below). | changed, so that their problems will not be attributed erroneously to | ||||||
|  | authors of previous versions. | ||||||
|  |  | ||||||
|       "Derivative Works" shall mean any work, whether in Source or Object |   Some devices are designed to deny users access to install or run | ||||||
|       form, that is based on (or derived from) the Work and for which the | modified versions of the software inside them, although the manufacturer | ||||||
|       editorial revisions, annotations, elaborations, or other modifications | can do so.  This is fundamentally incompatible with the aim of | ||||||
|       represent, as a whole, an original work of authorship. For the purposes | protecting users' freedom to change the software.  The systematic | ||||||
|       of this License, Derivative Works shall not include works that remain | pattern of such abuse occurs in the area of products for individuals to | ||||||
|       separable from, or merely link (or bind by name) to the interfaces of, | use, which is precisely where it is most unacceptable.  Therefore, we | ||||||
|       the Work and Derivative Works thereof. | have designed this version of the GPL to prohibit the practice for those | ||||||
|  | products.  If such problems arise substantially in other domains, we | ||||||
|  | stand ready to extend this provision to those domains in future versions | ||||||
|  | of the GPL, as needed to protect the freedom of users. | ||||||
|  |  | ||||||
|       "Contribution" shall mean any work of authorship, including |   Finally, every program is threatened constantly by software patents. | ||||||
|       the original version of the Work and any modifications or additions | States should not allow patents to restrict development and use of | ||||||
|       to that Work or Derivative Works thereof, that is intentionally | software on general-purpose computers, but in those that do, we wish to | ||||||
|       submitted to Licensor for inclusion in the Work by the copyright owner | avoid the special danger that patents applied to a free program could | ||||||
|       or by an individual or Legal Entity authorized to submit on behalf of | make it effectively proprietary.  To prevent this, the GPL assures that | ||||||
|       the copyright owner. For the purposes of this definition, "submitted" | patents cannot be used to render the program non-free. | ||||||
|       means any form of electronic, verbal, or written communication sent |  | ||||||
|       to the Licensor or its representatives, including but not limited to |  | ||||||
|       communication on electronic mailing lists, source code control systems, |  | ||||||
|       and issue tracking systems that are managed by, or on behalf of, the |  | ||||||
|       Licensor for the purpose of discussing and improving the Work, but |  | ||||||
|       excluding communication that is conspicuously marked or otherwise |  | ||||||
|       designated in writing by the copyright owner as "Not a Contribution." |  | ||||||
|  |  | ||||||
|       "Contributor" shall mean Licensor and any individual or Legal Entity |   The precise terms and conditions for copying, distribution and | ||||||
|       on behalf of whom a Contribution has been received by Licensor and | modification follow. | ||||||
|       subsequently incorporated within the Work. |  | ||||||
|  |  | ||||||
|    2. Grant of Copyright License. Subject to the terms and conditions of |                        TERMS AND CONDITIONS | ||||||
|       this License, each Contributor hereby grants to You a perpetual, |  | ||||||
|       worldwide, non-exclusive, no-charge, royalty-free, irrevocable |  | ||||||
|       copyright license to reproduce, prepare Derivative Works of, |  | ||||||
|       publicly display, publicly perform, sublicense, and distribute the |  | ||||||
|       Work and such Derivative Works in Source or Object form. |  | ||||||
|  |  | ||||||
|    3. Grant of Patent License. Subject to the terms and conditions of |   0. Definitions. | ||||||
|       this License, each Contributor hereby grants to You a perpetual, |  | ||||||
|       worldwide, non-exclusive, no-charge, royalty-free, irrevocable |  | ||||||
|       (except as stated in this section) patent license to make, have made, |  | ||||||
|       use, offer to sell, sell, import, and otherwise transfer the Work, |  | ||||||
|       where such license applies only to those patent claims licensable |  | ||||||
|       by such Contributor that are necessarily infringed by their |  | ||||||
|       Contribution(s) alone or by combination of their Contribution(s) |  | ||||||
|       with the Work to which such Contribution(s) was submitted. If You |  | ||||||
|       institute patent litigation against any entity (including a |  | ||||||
|       cross-claim or counterclaim in a lawsuit) alleging that the Work |  | ||||||
|       or a Contribution incorporated within the Work constitutes direct |  | ||||||
|       or contributory patent infringement, then any patent licenses |  | ||||||
|       granted to You under this License for that Work shall terminate |  | ||||||
|       as of the date such litigation is filed. |  | ||||||
|  |  | ||||||
|    4. Redistribution. You may reproduce and distribute copies of the |   "This License" refers to version 3 of the GNU General Public License. | ||||||
|       Work or Derivative Works thereof in any medium, with or without |  | ||||||
|       modifications, and in Source or Object form, provided that You |  | ||||||
|       meet the following conditions: |  | ||||||
|  |  | ||||||
|       (a) You must give any other recipients of the Work or |   "Copyright" also means copyright-like laws that apply to other kinds of | ||||||
|           Derivative Works a copy of this License; and | works, such as semiconductor masks. | ||||||
|  |  | ||||||
|       (b) You must cause any modified files to carry prominent notices |   "The Program" refers to any copyrightable work licensed under this | ||||||
|           stating that You changed the files; and | License.  Each licensee is addressed as "you".  "Licensees" and | ||||||
|  | "recipients" may be individuals or organizations. | ||||||
|  |  | ||||||
|       (c) You must retain, in the Source form of any Derivative Works |   To "modify" a work means to copy from or adapt all or part of the work | ||||||
|           that You distribute, all copyright, patent, trademark, and | in a fashion requiring copyright permission, other than the making of an | ||||||
|           attribution notices from the Source form of the Work, | exact copy.  The resulting work is called a "modified version" of the | ||||||
|           excluding those notices that do not pertain to any part of | earlier work or a work "based on" the earlier work. | ||||||
|           the Derivative Works; and |  | ||||||
|  |  | ||||||
|       (d) If the Work includes a "NOTICE" text file as part of its |   A "covered work" means either the unmodified Program or a work based | ||||||
|           distribution, then any Derivative Works that You distribute must | on the Program. | ||||||
|           include a readable copy of the attribution notices contained |  | ||||||
|           within such NOTICE file, excluding those notices that do not |  | ||||||
|           pertain to any part of the Derivative Works, in at least one |  | ||||||
|           of the following places: within a NOTICE text file distributed |  | ||||||
|           as part of the Derivative Works; within the Source form or |  | ||||||
|           documentation, if provided along with the Derivative Works; or, |  | ||||||
|           within a display generated by the Derivative Works, if and |  | ||||||
|           wherever such third-party notices normally appear. The contents |  | ||||||
|           of the NOTICE file are for informational purposes only and |  | ||||||
|           do not modify the License. You may add Your own attribution |  | ||||||
|           notices within Derivative Works that You distribute, alongside |  | ||||||
|           or as an addendum to the NOTICE text from the Work, provided |  | ||||||
|           that such additional attribution notices cannot be construed |  | ||||||
|           as modifying the License. |  | ||||||
|  |  | ||||||
|       You may add Your own copyright statement to Your modifications and |   To "propagate" a work means to do anything with it that, without | ||||||
|       may provide additional or different license terms and conditions | permission, would make you directly or secondarily liable for | ||||||
|       for use, reproduction, or distribution of Your modifications, or | infringement under applicable copyright law, except executing it on a | ||||||
|       for any such Derivative Works as a whole, provided Your use, | computer or modifying a private copy.  Propagation includes copying, | ||||||
|       reproduction, and distribution of the Work otherwise complies with | distribution (with or without modification), making available to the | ||||||
|       the conditions stated in this License. | public, and in some countries other activities as well. | ||||||
|  |  | ||||||
|    5. Submission of Contributions. Unless You explicitly state otherwise, |   To "convey" a work means any kind of propagation that enables other | ||||||
|       any Contribution intentionally submitted for inclusion in the Work | parties to make or receive copies.  Mere interaction with a user through | ||||||
|       by You to the Licensor shall be under the terms and conditions of | a computer network, with no transfer of a copy, is not conveying. | ||||||
|       this License, without any additional terms or conditions. |  | ||||||
|       Notwithstanding the above, nothing herein shall supersede or modify |  | ||||||
|       the terms of any separate license agreement you may have executed |  | ||||||
|       with Licensor regarding such Contributions. |  | ||||||
|  |  | ||||||
|    6. Trademarks. This License does not grant permission to use the trade |   An interactive user interface displays "Appropriate Legal Notices" | ||||||
|       names, trademarks, service marks, or product names of the Licensor, | to the extent that it includes a convenient and prominently visible | ||||||
|       except as required for reasonable and customary use in describing the | feature that (1) displays an appropriate copyright notice, and (2) | ||||||
|       origin of the Work and reproducing the content of the NOTICE file. | tells the user that there is no warranty for the work (except to the | ||||||
|  | extent that warranties are provided), that licensees may convey the | ||||||
|  | work under this License, and how to view a copy of this License.  If | ||||||
|  | the interface presents a list of user commands or options, such as a | ||||||
|  | menu, a prominent item in the list meets this criterion. | ||||||
|  |  | ||||||
|    7. Disclaimer of Warranty. Unless required by applicable law or |   1. Source Code. | ||||||
|       agreed to in writing, Licensor provides the Work (and each |  | ||||||
|       Contributor provides its Contributions) on an "AS IS" BASIS, |  | ||||||
|       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |  | ||||||
|       implied, including, without limitation, any warranties or conditions |  | ||||||
|       of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A |  | ||||||
|       PARTICULAR PURPOSE. You are solely responsible for determining the |  | ||||||
|       appropriateness of using or redistributing the Work and assume any |  | ||||||
|       risks associated with Your exercise of permissions under this License. |  | ||||||
|  |  | ||||||
|    8. Limitation of Liability. In no event and under no legal theory, |   The "source code" for a work means the preferred form of the work | ||||||
|       whether in tort (including negligence), contract, or otherwise, | for making modifications to it.  "Object code" means any non-source | ||||||
|       unless required by applicable law (such as deliberate and grossly | form of a work. | ||||||
|       negligent acts) or agreed to in writing, shall any Contributor be |  | ||||||
|       liable to You for damages, including any direct, indirect, special, |  | ||||||
|       incidental, or consequential damages of any character arising as a |  | ||||||
|       result of this License or out of the use or inability to use the |  | ||||||
|       Work (including but not limited to damages for loss of goodwill, |  | ||||||
|       work stoppage, computer failure or malfunction, or any and all |  | ||||||
|       other commercial damages or losses), even if such Contributor |  | ||||||
|       has been advised of the possibility of such damages. |  | ||||||
|  |  | ||||||
|    9. Accepting Warranty or Additional Liability. While redistributing |   A "Standard Interface" means an interface that either is an official | ||||||
|       the Work or Derivative Works thereof, You may choose to offer, | standard defined by a recognized standards body, or, in the case of | ||||||
|       and charge a fee for, acceptance of support, warranty, indemnity, | interfaces specified for a particular programming language, one that | ||||||
|       or other liability obligations and/or rights consistent with this | is widely used among developers working in that language. | ||||||
|       License. However, in accepting such obligations, You may act only |  | ||||||
|       on Your own behalf and on Your sole responsibility, not on behalf |  | ||||||
|       of any other Contributor, and only if You agree to indemnify, |  | ||||||
|       defend, and hold each Contributor harmless for any liability |  | ||||||
|       incurred by, or claims asserted against, such Contributor by reason |  | ||||||
|       of your accepting any such warranty or additional liability. |  | ||||||
|  |  | ||||||
|    END OF TERMS AND CONDITIONS |   The "System Libraries" of an executable work include anything, other | ||||||
|  | than the work as a whole, that (a) is included in the normal form of | ||||||
|  | packaging a Major Component, but which is not part of that Major | ||||||
|  | Component, and (b) serves only to enable use of the work with that | ||||||
|  | Major Component, or to implement a Standard Interface for which an | ||||||
|  | implementation is available to the public in source code form.  A | ||||||
|  | "Major Component", in this context, means a major essential component | ||||||
|  | (kernel, window system, and so on) of the specific operating system | ||||||
|  | (if any) on which the executable work runs, or a compiler used to | ||||||
|  | produce the work, or an object code interpreter used to run it. | ||||||
|  |  | ||||||
|    APPENDIX: How to apply the Apache License to your work. |   The "Corresponding Source" for a work in object code form means all | ||||||
|  | the source code needed to generate, install, and (for an executable | ||||||
|  | work) run the object code and to modify the work, including scripts to | ||||||
|  | control those activities.  However, it does not include the work's | ||||||
|  | System Libraries, or general-purpose tools or generally available free | ||||||
|  | programs which are used unmodified in performing those activities but | ||||||
|  | which are not part of the work.  For example, Corresponding Source | ||||||
|  | includes interface definition files associated with source files for | ||||||
|  | the work, and the source code for shared libraries and dynamically | ||||||
|  | linked subprograms that the work is specifically designed to require, | ||||||
|  | such as by intimate data communication or control flow between those | ||||||
|  | subprograms and other parts of the work. | ||||||
|  |  | ||||||
|       To apply the Apache License to your work, attach the following |   The Corresponding Source need not include anything that users | ||||||
|       boilerplate notice, with the fields enclosed by brackets "{}" | can regenerate automatically from other parts of the Corresponding | ||||||
|       replaced with your own identifying information. (Don't include | Source. | ||||||
|       the brackets!)  The text should be enclosed in the appropriate |  | ||||||
|       comment syntax for the file format. We also recommend that a |  | ||||||
|       file or class name and description of purpose be included on the |  | ||||||
|       same "printed page" as the copyright notice for easier |  | ||||||
|       identification within third-party archives. |  | ||||||
|  |  | ||||||
|    Copyright 2017 Pascal Vizeli |   The Corresponding Source for a work in source code form is that | ||||||
|  | same work. | ||||||
|  |  | ||||||
|    Licensed under the Apache License, Version 2.0 (the "License"); |   2. Basic Permissions. | ||||||
|    you may not use this file except in compliance with the License. |  | ||||||
|    You may obtain a copy of the License at |  | ||||||
|  |  | ||||||
|        http://www.apache.org/licenses/LICENSE-2.0 |   All rights granted under this License are granted for the term of | ||||||
|  | copyright on the Program, and are irrevocable provided the stated | ||||||
|  | conditions are met.  This License explicitly affirms your unlimited | ||||||
|  | permission to run the unmodified Program.  The output from running a | ||||||
|  | covered work is covered by this License only if the output, given its | ||||||
|  | content, constitutes a covered work.  This License acknowledges your | ||||||
|  | rights of fair use or other equivalent, as provided by copyright law. | ||||||
|  |  | ||||||
|    Unless required by applicable law or agreed to in writing, software |   You may make, run and propagate covered works that you do not | ||||||
|    distributed under the License is distributed on an "AS IS" BASIS, | convey, without conditions so long as your license otherwise remains | ||||||
|    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | in force.  You may convey covered works to others for the sole purpose | ||||||
|    See the License for the specific language governing permissions and | of having them make modifications exclusively for you, or provide you | ||||||
|    limitations under the License. | with facilities for running those works, provided that you comply with | ||||||
|  | the terms of this License in conveying all material for which you do | ||||||
|  | not control copyright.  Those thus making or running the covered works | ||||||
|  | for you must do so exclusively on your behalf, under your direction | ||||||
|  | and control, on terms that prohibit them from making any copies of | ||||||
|  | your copyrighted material outside their relationship with you. | ||||||
|  |  | ||||||
|  |   Conveying under any other circumstances is permitted solely under | ||||||
|  | the conditions stated below.  Sublicensing is not allowed; section 10 | ||||||
|  | makes it unnecessary. | ||||||
|  |  | ||||||
|  |   3. Protecting Users' Legal Rights From Anti-Circumvention Law. | ||||||
|  |  | ||||||
|  |   No covered work shall be deemed part of an effective technological | ||||||
|  | measure under any applicable law fulfilling obligations under article | ||||||
|  | 11 of the WIPO copyright treaty adopted on 20 December 1996, or | ||||||
|  | similar laws prohibiting or restricting circumvention of such | ||||||
|  | measures. | ||||||
|  |  | ||||||
|  |   When you convey a covered work, you waive any legal power to forbid | ||||||
|  | circumvention of technological measures to the extent such circumvention | ||||||
|  | is effected by exercising rights under this License with respect to | ||||||
|  | the covered work, and you disclaim any intention to limit operation or | ||||||
|  | modification of the work as a means of enforcing, against the work's | ||||||
|  | users, your or third parties' legal rights to forbid circumvention of | ||||||
|  | technological measures. | ||||||
|  |  | ||||||
|  |   4. Conveying Verbatim Copies. | ||||||
|  |  | ||||||
|  |   You may convey verbatim copies of the Program's source code as you | ||||||
|  | receive it, in any medium, provided that you conspicuously and | ||||||
|  | appropriately publish on each copy an appropriate copyright notice; | ||||||
|  | keep intact all notices stating that this License and any | ||||||
|  | non-permissive terms added in accord with section 7 apply to the code; | ||||||
|  | keep intact all notices of the absence of any warranty; and give all | ||||||
|  | recipients a copy of this License along with the Program. | ||||||
|  |  | ||||||
|  |   You may charge any price or no price for each copy that you convey, | ||||||
|  | and you may offer support or warranty protection for a fee. | ||||||
|  |  | ||||||
|  |   5. Conveying Modified Source Versions. | ||||||
|  |  | ||||||
|  |   You may convey a work based on the Program, or the modifications to | ||||||
|  | produce it from the Program, in the form of source code under the | ||||||
|  | terms of section 4, provided that you also meet all of these conditions: | ||||||
|  |  | ||||||
|  |     a) The work must carry prominent notices stating that you modified | ||||||
|  |     it, and giving a relevant date. | ||||||
|  |  | ||||||
|  |     b) The work must carry prominent notices stating that it is | ||||||
|  |     released under this License and any conditions added under section | ||||||
|  |     7.  This requirement modifies the requirement in section 4 to | ||||||
|  |     "keep intact all notices". | ||||||
|  |  | ||||||
|  |     c) You must license the entire work, as a whole, under this | ||||||
|  |     License to anyone who comes into possession of a copy.  This | ||||||
|  |     License will therefore apply, along with any applicable section 7 | ||||||
|  |     additional terms, to the whole of the work, and all its parts, | ||||||
|  |     regardless of how they are packaged.  This License gives no | ||||||
|  |     permission to license the work in any other way, but it does not | ||||||
|  |     invalidate such permission if you have separately received it. | ||||||
|  |  | ||||||
|  |     d) If the work has interactive user interfaces, each must display | ||||||
|  |     Appropriate Legal Notices; however, if the Program has interactive | ||||||
|  |     interfaces that do not display Appropriate Legal Notices, your | ||||||
|  |     work need not make them do so. | ||||||
|  |  | ||||||
|  |   A compilation of a covered work with other separate and independent | ||||||
|  | works, which are not by their nature extensions of the covered work, | ||||||
|  | and which are not combined with it such as to form a larger program, | ||||||
|  | in or on a volume of a storage or distribution medium, is called an | ||||||
|  | "aggregate" if the compilation and its resulting copyright are not | ||||||
|  | used to limit the access or legal rights of the compilation's users | ||||||
|  | beyond what the individual works permit.  Inclusion of a covered work | ||||||
|  | in an aggregate does not cause this License to apply to the other | ||||||
|  | parts of the aggregate. | ||||||
|  |  | ||||||
|  |   6. Conveying Non-Source Forms. | ||||||
|  |  | ||||||
|  |   You may convey a covered work in object code form under the terms | ||||||
|  | of sections 4 and 5, provided that you also convey the | ||||||
|  | machine-readable Corresponding Source under the terms of this License, | ||||||
|  | in one of these ways: | ||||||
|  |  | ||||||
|  |     a) Convey the object code in, or embodied in, a physical product | ||||||
|  |     (including a physical distribution medium), accompanied by the | ||||||
|  |     Corresponding Source fixed on a durable physical medium | ||||||
|  |     customarily used for software interchange. | ||||||
|  |  | ||||||
|  |     b) Convey the object code in, or embodied in, a physical product | ||||||
|  |     (including a physical distribution medium), accompanied by a | ||||||
|  |     written offer, valid for at least three years and valid for as | ||||||
|  |     long as you offer spare parts or customer support for that product | ||||||
|  |     model, to give anyone who possesses the object code either (1) a | ||||||
|  |     copy of the Corresponding Source for all the software in the | ||||||
|  |     product that is covered by this License, on a durable physical | ||||||
|  |     medium customarily used for software interchange, for a price no | ||||||
|  |     more than your reasonable cost of physically performing this | ||||||
|  |     conveying of source, or (2) access to copy the | ||||||
|  |     Corresponding Source from a network server at no charge. | ||||||
|  |  | ||||||
|  |     c) Convey individual copies of the object code with a copy of the | ||||||
|  |     written offer to provide the Corresponding Source.  This | ||||||
|  |     alternative is allowed only occasionally and noncommercially, and | ||||||
|  |     only if you received the object code with such an offer, in accord | ||||||
|  |     with subsection 6b. | ||||||
|  |  | ||||||
|  |     d) Convey the object code by offering access from a designated | ||||||
|  |     place (gratis or for a charge), and offer equivalent access to the | ||||||
|  |     Corresponding Source in the same way through the same place at no | ||||||
|  |     further charge.  You need not require recipients to copy the | ||||||
|  |     Corresponding Source along with the object code.  If the place to | ||||||
|  |     copy the object code is a network server, the Corresponding Source | ||||||
|  |     may be on a different server (operated by you or a third party) | ||||||
|  |     that supports equivalent copying facilities, provided you maintain | ||||||
|  |     clear directions next to the object code saying where to find the | ||||||
|  |     Corresponding Source.  Regardless of what server hosts the | ||||||
|  |     Corresponding Source, you remain obligated to ensure that it is | ||||||
|  |     available for as long as needed to satisfy these requirements. | ||||||
|  |  | ||||||
|  |     e) Convey the object code using peer-to-peer transmission, provided | ||||||
|  |     you inform other peers where the object code and Corresponding | ||||||
|  |     Source of the work are being offered to the general public at no | ||||||
|  |     charge under subsection 6d. | ||||||
|  |  | ||||||
|  |   A separable portion of the object code, whose source code is excluded | ||||||
|  | from the Corresponding Source as a System Library, need not be | ||||||
|  | included in conveying the object code work. | ||||||
|  |  | ||||||
|  |   A "User Product" is either (1) a "consumer product", which means any | ||||||
|  | tangible personal property which is normally used for personal, family, | ||||||
|  | or household purposes, or (2) anything designed or sold for incorporation | ||||||
|  | into a dwelling.  In determining whether a product is a consumer product, | ||||||
|  | doubtful cases shall be resolved in favor of coverage.  For a particular | ||||||
|  | product received by a particular user, "normally used" refers to a | ||||||
|  | typical or common use of that class of product, regardless of the status | ||||||
|  | of the particular user or of the way in which the particular user | ||||||
|  | actually uses, or expects or is expected to use, the product.  A product | ||||||
|  | is a consumer product regardless of whether the product has substantial | ||||||
|  | commercial, industrial or non-consumer uses, unless such uses represent | ||||||
|  | the only significant mode of use of the product. | ||||||
|  |  | ||||||
|  |   "Installation Information" for a User Product means any methods, | ||||||
|  | procedures, authorization keys, or other information required to install | ||||||
|  | and execute modified versions of a covered work in that User Product from | ||||||
|  | a modified version of its Corresponding Source.  The information must | ||||||
|  | suffice to ensure that the continued functioning of the modified object | ||||||
|  | code is in no case prevented or interfered with solely because | ||||||
|  | modification has been made. | ||||||
|  |  | ||||||
|  |   If you convey an object code work under this section in, or with, or | ||||||
|  | specifically for use in, a User Product, and the conveying occurs as | ||||||
|  | part of a transaction in which the right of possession and use of the | ||||||
|  | User Product is transferred to the recipient in perpetuity or for a | ||||||
|  | fixed term (regardless of how the transaction is characterized), the | ||||||
|  | Corresponding Source conveyed under this section must be accompanied | ||||||
|  | by the Installation Information.  But this requirement does not apply | ||||||
|  | if neither you nor any third party retains the ability to install | ||||||
|  | modified object code on the User Product (for example, the work has | ||||||
|  | been installed in ROM). | ||||||
|  |  | ||||||
|  |   The requirement to provide Installation Information does not include a | ||||||
|  | requirement to continue to provide support service, warranty, or updates | ||||||
|  | for a work that has been modified or installed by the recipient, or for | ||||||
|  | the User Product in which it has been modified or installed.  Access to a | ||||||
|  | network may be denied when the modification itself materially and | ||||||
|  | adversely affects the operation of the network or violates the rules and | ||||||
|  | protocols for communication across the network. | ||||||
|  |  | ||||||
|  |   Corresponding Source conveyed, and Installation Information provided, | ||||||
|  | in accord with this section must be in a format that is publicly | ||||||
|  | documented (and with an implementation available to the public in | ||||||
|  | source code form), and must require no special password or key for | ||||||
|  | unpacking, reading or copying. | ||||||
|  |  | ||||||
|  |   7. Additional Terms. | ||||||
|  |  | ||||||
|  |   "Additional permissions" are terms that supplement the terms of this | ||||||
|  | License by making exceptions from one or more of its conditions. | ||||||
|  | Additional permissions that are applicable to the entire Program shall | ||||||
|  | be treated as though they were included in this License, to the extent | ||||||
|  | that they are valid under applicable law.  If additional permissions | ||||||
|  | apply only to part of the Program, that part may be used separately | ||||||
|  | under those permissions, but the entire Program remains governed by | ||||||
|  | this License without regard to the additional permissions. | ||||||
|  |  | ||||||
|  |   When you convey a copy of a covered work, you may at your option | ||||||
|  | remove any additional permissions from that copy, or from any part of | ||||||
|  | it.  (Additional permissions may be written to require their own | ||||||
|  | removal in certain cases when you modify the work.)  You may place | ||||||
|  | additional permissions on material, added by you to a covered work, | ||||||
|  | for which you have or can give appropriate copyright permission. | ||||||
|  |  | ||||||
|  |   Notwithstanding any other provision of this License, for material you | ||||||
|  | add to a covered work, you may (if authorized by the copyright holders of | ||||||
|  | that material) supplement the terms of this License with terms: | ||||||
|  |  | ||||||
|  |     a) Disclaiming warranty or limiting liability differently from the | ||||||
|  |     terms of sections 15 and 16 of this License; or | ||||||
|  |  | ||||||
|  |     b) Requiring preservation of specified reasonable legal notices or | ||||||
|  |     author attributions in that material or in the Appropriate Legal | ||||||
|  |     Notices displayed by works containing it; or | ||||||
|  |  | ||||||
|  |     c) Prohibiting misrepresentation of the origin of that material, or | ||||||
|  |     requiring that modified versions of such material be marked in | ||||||
|  |     reasonable ways as different from the original version; or | ||||||
|  |  | ||||||
|  |     d) Limiting the use for publicity purposes of names of licensors or | ||||||
|  |     authors of the material; or | ||||||
|  |  | ||||||
|  |     e) Declining to grant rights under trademark law for use of some | ||||||
|  |     trade names, trademarks, or service marks; or | ||||||
|  |  | ||||||
|  |     f) Requiring indemnification of licensors and authors of that | ||||||
|  |     material by anyone who conveys the material (or modified versions of | ||||||
|  |     it) with contractual assumptions of liability to the recipient, for | ||||||
|  |     any liability that these contractual assumptions directly impose on | ||||||
|  |     those licensors and authors. | ||||||
|  |  | ||||||
|  |   All other non-permissive additional terms are considered "further | ||||||
|  | restrictions" within the meaning of section 10.  If the Program as you | ||||||
|  | received it, or any part of it, contains a notice stating that it is | ||||||
|  | governed by this License along with a term that is a further | ||||||
|  | restriction, you may remove that term.  If a license document contains | ||||||
|  | a further restriction but permits relicensing or conveying under this | ||||||
|  | License, you may add to a covered work material governed by the terms | ||||||
|  | of that license document, provided that the further restriction does | ||||||
|  | not survive such relicensing or conveying. | ||||||
|  |  | ||||||
|  |   If you add terms to a covered work in accord with this section, you | ||||||
|  | must place, in the relevant source files, a statement of the | ||||||
|  | additional terms that apply to those files, or a notice indicating | ||||||
|  | where to find the applicable terms. | ||||||
|  |  | ||||||
|  |   Additional terms, permissive or non-permissive, may be stated in the | ||||||
|  | form of a separately written license, or stated as exceptions; | ||||||
|  | the above requirements apply either way. | ||||||
|  |  | ||||||
|  |   8. Termination. | ||||||
|  |  | ||||||
|  |   You may not propagate or modify a covered work except as expressly | ||||||
|  | provided under this License.  Any attempt otherwise to propagate or | ||||||
|  | modify it is void, and will automatically terminate your rights under | ||||||
|  | this License (including any patent licenses granted under the third | ||||||
|  | paragraph of section 11). | ||||||
|  |  | ||||||
|  |   However, if you cease all violation of this License, then your | ||||||
|  | license from a particular copyright holder is reinstated (a) | ||||||
|  | provisionally, unless and until the copyright holder explicitly and | ||||||
|  | finally terminates your license, and (b) permanently, if the copyright | ||||||
|  | holder fails to notify you of the violation by some reasonable means | ||||||
|  | prior to 60 days after the cessation. | ||||||
|  |  | ||||||
|  |   Moreover, your license from a particular copyright holder is | ||||||
|  | reinstated permanently if the copyright holder notifies you of the | ||||||
|  | violation by some reasonable means, this is the first time you have | ||||||
|  | received notice of violation of this License (for any work) from that | ||||||
|  | copyright holder, and you cure the violation prior to 30 days after | ||||||
|  | your receipt of the notice. | ||||||
|  |  | ||||||
|  |   Termination of your rights under this section does not terminate the | ||||||
|  | licenses of parties who have received copies or rights from you under | ||||||
|  | this License.  If your rights have been terminated and not permanently | ||||||
|  | reinstated, you do not qualify to receive new licenses for the same | ||||||
|  | material under section 10. | ||||||
|  |  | ||||||
|  |   9. Acceptance Not Required for Having Copies. | ||||||
|  |  | ||||||
|  |   You are not required to accept this License in order to receive or | ||||||
|  | run a copy of the Program.  Ancillary propagation of a covered work | ||||||
|  | occurring solely as a consequence of using peer-to-peer transmission | ||||||
|  | to receive a copy likewise does not require acceptance.  However, | ||||||
|  | nothing other than this License grants you permission to propagate or | ||||||
|  | modify any covered work.  These actions infringe copyright if you do | ||||||
|  | not accept this License.  Therefore, by modifying or propagating a | ||||||
|  | covered work, you indicate your acceptance of this License to do so. | ||||||
|  |  | ||||||
|  |   10. Automatic Licensing of Downstream Recipients. | ||||||
|  |  | ||||||
|  |   Each time you convey a covered work, the recipient automatically | ||||||
|  | receives a license from the original licensors, to run, modify and | ||||||
|  | propagate that work, subject to this License.  You are not responsible | ||||||
|  | for enforcing compliance by third parties with this License. | ||||||
|  |  | ||||||
|  |   An "entity transaction" is a transaction transferring control of an | ||||||
|  | organization, or substantially all assets of one, or subdividing an | ||||||
|  | organization, or merging organizations.  If propagation of a covered | ||||||
|  | work results from an entity transaction, each party to that | ||||||
|  | transaction who receives a copy of the work also receives whatever | ||||||
|  | licenses to the work the party's predecessor in interest had or could | ||||||
|  | give under the previous paragraph, plus a right to possession of the | ||||||
|  | Corresponding Source of the work from the predecessor in interest, if | ||||||
|  | the predecessor has it or can get it with reasonable efforts. | ||||||
|  |  | ||||||
|  |   You may not impose any further restrictions on the exercise of the | ||||||
|  | rights granted or affirmed under this License.  For example, you may | ||||||
|  | not impose a license fee, royalty, or other charge for exercise of | ||||||
|  | rights granted under this License, and you may not initiate litigation | ||||||
|  | (including a cross-claim or counterclaim in a lawsuit) alleging that | ||||||
|  | any patent claim is infringed by making, using, selling, offering for | ||||||
|  | sale, or importing the Program or any portion of it. | ||||||
|  |  | ||||||
|  |   11. Patents. | ||||||
|  |  | ||||||
|  |   A "contributor" is a copyright holder who authorizes use under this | ||||||
|  | License of the Program or a work on which the Program is based.  The | ||||||
|  | work thus licensed is called the contributor's "contributor version". | ||||||
|  |  | ||||||
|  |   A contributor's "essential patent claims" are all patent claims | ||||||
|  | owned or controlled by the contributor, whether already acquired or | ||||||
|  | hereafter acquired, that would be infringed by some manner, permitted | ||||||
|  | by this License, of making, using, or selling its contributor version, | ||||||
|  | but do not include claims that would be infringed only as a | ||||||
|  | consequence of further modification of the contributor version.  For | ||||||
|  | purposes of this definition, "control" includes the right to grant | ||||||
|  | patent sublicenses in a manner consistent with the requirements of | ||||||
|  | this License. | ||||||
|  |  | ||||||
|  |   Each contributor grants you a non-exclusive, worldwide, royalty-free | ||||||
|  | patent license under the contributor's essential patent claims, to | ||||||
|  | make, use, sell, offer for sale, import and otherwise run, modify and | ||||||
|  | propagate the contents of its contributor version. | ||||||
|  |  | ||||||
|  |   In the following three paragraphs, a "patent license" is any express | ||||||
|  | agreement or commitment, however denominated, not to enforce a patent | ||||||
|  | (such as an express permission to practice a patent or covenant not to | ||||||
|  | sue for patent infringement).  To "grant" such a patent license to a | ||||||
|  | party means to make such an agreement or commitment not to enforce a | ||||||
|  | patent against the party. | ||||||
|  |  | ||||||
|  |   If you convey a covered work, knowingly relying on a patent license, | ||||||
|  | and the Corresponding Source of the work is not available for anyone | ||||||
|  | to copy, free of charge and under the terms of this License, through a | ||||||
|  | publicly available network server or other readily accessible means, | ||||||
|  | then you must either (1) cause the Corresponding Source to be so | ||||||
|  | available, or (2) arrange to deprive yourself of the benefit of the | ||||||
|  | patent license for this particular work, or (3) arrange, in a manner | ||||||
|  | consistent with the requirements of this License, to extend the patent | ||||||
|  | license to downstream recipients.  "Knowingly relying" means you have | ||||||
|  | actual knowledge that, but for the patent license, your conveying the | ||||||
|  | covered work in a country, or your recipient's use of the covered work | ||||||
|  | in a country, would infringe one or more identifiable patents in that | ||||||
|  | country that you have reason to believe are valid. | ||||||
|  |  | ||||||
|  |   If, pursuant to or in connection with a single transaction or | ||||||
|  | arrangement, you convey, or propagate by procuring conveyance of, a | ||||||
|  | covered work, and grant a patent license to some of the parties | ||||||
|  | receiving the covered work authorizing them to use, propagate, modify | ||||||
|  | or convey a specific copy of the covered work, then the patent license | ||||||
|  | you grant is automatically extended to all recipients of the covered | ||||||
|  | work and works based on it. | ||||||
|  |  | ||||||
|  |   A patent license is "discriminatory" if it does not include within | ||||||
|  | the scope of its coverage, prohibits the exercise of, or is | ||||||
|  | conditioned on the non-exercise of one or more of the rights that are | ||||||
|  | specifically granted under this License.  You may not convey a covered | ||||||
|  | work if you are a party to an arrangement with a third party that is | ||||||
|  | in the business of distributing software, under which you make payment | ||||||
|  | to the third party based on the extent of your activity of conveying | ||||||
|  | the work, and under which the third party grants, to any of the | ||||||
|  | parties who would receive the covered work from you, a discriminatory | ||||||
|  | patent license (a) in connection with copies of the covered work | ||||||
|  | conveyed by you (or copies made from those copies), or (b) primarily | ||||||
|  | for and in connection with specific products or compilations that | ||||||
|  | contain the covered work, unless you entered into that arrangement, | ||||||
|  | or that patent license was granted, prior to 28 March 2007. | ||||||
|  |  | ||||||
|  |   Nothing in this License shall be construed as excluding or limiting | ||||||
|  | any implied license or other defenses to infringement that may | ||||||
|  | otherwise be available to you under applicable patent law. | ||||||
|  |  | ||||||
|  |   12. No Surrender of Others' Freedom. | ||||||
|  |  | ||||||
|  |   If conditions are imposed on you (whether by court order, agreement or | ||||||
|  | otherwise) that contradict the conditions of this License, they do not | ||||||
|  | excuse you from the conditions of this License.  If you cannot convey a | ||||||
|  | covered work so as to satisfy simultaneously your obligations under this | ||||||
|  | License and any other pertinent obligations, then as a consequence you may | ||||||
|  | not convey it at all.  For example, if you agree to terms that obligate you | ||||||
|  | to collect a royalty for further conveying from those to whom you convey | ||||||
|  | the Program, the only way you could satisfy both those terms and this | ||||||
|  | License would be to refrain entirely from conveying the Program. | ||||||
|  |  | ||||||
|  |   13. Use with the GNU Affero General Public License. | ||||||
|  |  | ||||||
|  |   Notwithstanding any other provision of this License, you have | ||||||
|  | permission to link or combine any covered work with a work licensed | ||||||
|  | under version 3 of the GNU Affero General Public License into a single | ||||||
|  | combined work, and to convey the resulting work.  The terms of this | ||||||
|  | License will continue to apply to the part which is the covered work, | ||||||
|  | but the special requirements of the GNU Affero General Public License, | ||||||
|  | section 13, concerning interaction through a network will apply to the | ||||||
|  | combination as such. | ||||||
|  |  | ||||||
|  |   14. Revised Versions of this License. | ||||||
|  |  | ||||||
|  |   The Free Software Foundation may publish revised and/or new versions of | ||||||
|  | the GNU General Public License from time to time.  Such new versions will | ||||||
|  | be similar in spirit to the present version, but may differ in detail to | ||||||
|  | address new problems or concerns. | ||||||
|  |  | ||||||
|  |   Each version is given a distinguishing version number.  If the | ||||||
|  | Program specifies that a certain numbered version of the GNU General | ||||||
|  | Public License "or any later version" applies to it, you have the | ||||||
|  | option of following the terms and conditions either of that numbered | ||||||
|  | version or of any later version published by the Free Software | ||||||
|  | Foundation.  If the Program does not specify a version number of the | ||||||
|  | GNU General Public License, you may choose any version ever published | ||||||
|  | by the Free Software Foundation. | ||||||
|  |  | ||||||
|  |   If the Program specifies that a proxy can decide which future | ||||||
|  | versions of the GNU General Public License can be used, that proxy's | ||||||
|  | public statement of acceptance of a version permanently authorizes you | ||||||
|  | to choose that version for the Program. | ||||||
|  |  | ||||||
|  |   Later license versions may give you additional or different | ||||||
|  | permissions.  However, no additional obligations are imposed on any | ||||||
|  | author or copyright holder as a result of your choosing to follow a | ||||||
|  | later version. | ||||||
|  |  | ||||||
|  |   15. Disclaimer of Warranty. | ||||||
|  |  | ||||||
|  |   THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY | ||||||
|  | APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT | ||||||
|  | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY | ||||||
|  | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, | ||||||
|  | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | ||||||
|  | PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM | ||||||
|  | IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF | ||||||
|  | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. | ||||||
|  |  | ||||||
|  |   16. Limitation of Liability. | ||||||
|  |  | ||||||
|  |   IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING | ||||||
|  | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS | ||||||
|  | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY | ||||||
|  | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE | ||||||
|  | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF | ||||||
|  | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD | ||||||
|  | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), | ||||||
|  | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF | ||||||
|  | SUCH DAMAGES. | ||||||
|  |  | ||||||
|  |   17. Interpretation of Sections 15 and 16. | ||||||
|  |  | ||||||
|  |   If the disclaimer of warranty and limitation of liability provided | ||||||
|  | above cannot be given local legal effect according to their terms, | ||||||
|  | reviewing courts shall apply local law that most closely approximates | ||||||
|  | an absolute waiver of all civil liability in connection with the | ||||||
|  | Program, unless a warranty or assumption of liability accompanies a | ||||||
|  | copy of the Program in return for a fee. | ||||||
|  |  | ||||||
|  |                      END OF TERMS AND CONDITIONS | ||||||
|  |  | ||||||
|  |             How to Apply These Terms to Your New Programs | ||||||
|  |  | ||||||
|  |   If you develop a new program, and you want it to be of the greatest | ||||||
|  | possible use to the public, the best way to achieve this is to make it | ||||||
|  | free software which everyone can redistribute and change under these terms. | ||||||
|  |  | ||||||
|  |   To do so, attach the following notices to the program.  It is safest | ||||||
|  | to attach them to the start of each source file to most effectively | ||||||
|  | state the exclusion of warranty; and each file should have at least | ||||||
|  | the "copyright" line and a pointer to where the full notice is found. | ||||||
|  |  | ||||||
|  |     <one line to give the program's name and a brief idea of what it does.> | ||||||
|  |     Copyright (C) <year>  <name of author> | ||||||
|  |  | ||||||
|  |     This program is free software: you can redistribute it and/or modify | ||||||
|  |     it under the terms of the GNU General Public License as published by | ||||||
|  |     the Free Software Foundation, either version 3 of the License, or | ||||||
|  |     (at your option) any later version. | ||||||
|  |  | ||||||
|  |     This program is distributed in the hope that it will be useful, | ||||||
|  |     but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  |     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  |     GNU General Public License for more details. | ||||||
|  |  | ||||||
|  |     You should have received a copy of the GNU General Public License | ||||||
|  |     along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  |  | ||||||
|  | Also add information on how to contact you by electronic and paper mail. | ||||||
|  |  | ||||||
|  |   If the program does terminal interaction, make it output a short | ||||||
|  | notice like this when it starts in an interactive mode: | ||||||
|  |  | ||||||
|  |     <program>  Copyright (C) <year>  <name of author> | ||||||
|  |     This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. | ||||||
|  |     This is free software, and you are welcome to redistribute it | ||||||
|  |     under certain conditions; type `show c' for details. | ||||||
|  |  | ||||||
|  | The hypothetical commands `show w' and `show c' should show the appropriate | ||||||
|  | parts of the General Public License.  Of course, your program's commands | ||||||
|  | might be different; for a GUI interface, you would use an "about box". | ||||||
|  |  | ||||||
|  |   You should also get your employer (if you work as a programmer) or school, | ||||||
|  | if any, to sign a "copyright disclaimer" for the program, if necessary. | ||||||
|  | For more information on this, and how to apply and follow the GNU GPL, see | ||||||
|  | <https://www.gnu.org/licenses/>. | ||||||
|  |  | ||||||
|  |   The GNU General Public License does not permit incorporating your program | ||||||
|  | into proprietary programs.  If your program is a subroutine library, you | ||||||
|  | may consider it more useful to permit linking proprietary applications with | ||||||
|  | the library.  If this is what you want to do, use the GNU Lesser General | ||||||
|  | Public License instead of this License.  But first, please read | ||||||
|  | <https://www.gnu.org/licenses/why-not-lgpl.html>. | ||||||
|   | |||||||
| @@ -1,3 +1,3 @@ | |||||||
| include LICENSE.md | include LICENSE.md | ||||||
| graft hassio | graft supervisor | ||||||
| recursive-exclude * *.py[co] | recursive-exclude * *.py[co] | ||||||
|   | |||||||
							
								
								
									
										30
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										30
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,14 +1,26 @@ | |||||||
| # Hass.io | # Home Assistant Supervisor | ||||||
|  |  | ||||||
| ### First private cloud solution for home automation. | ## First private cloud solution for home automation | ||||||
|  |  | ||||||
| Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software. | Home Assistant (former Hass.io) is a container-based system for managing your | ||||||
|  | Home Assistant Core installation and related applications. The system is | ||||||
|  | controlled via Home Assistant which communicates with the Supervisor. The | ||||||
|  | Supervisor provides an API to manage the installation. This includes changing | ||||||
| - [Hass.io Addons](https://github.com/home-assistant/hassio-addons) | network settings or installing and updating software. | ||||||
| - [Hass.io Build](https://github.com/home-assistant/hassio-build) |  | ||||||
|  |  | ||||||
| ## Installation | ## Installation | ||||||
|  |  | ||||||
| Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio). | Installation instructions can be found at https://home-assistant.io/hassio. | ||||||
|  |  | ||||||
|  | ## Development | ||||||
|  |  | ||||||
|  | The development of the Supervisor is not difficult but tricky. | ||||||
|  |  | ||||||
|  | - You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder | ||||||
|  | - Access a HassOS device or VM and pull your Supervisor. | ||||||
|  | - Set the developer modus with the CLI tool: `ha supervisor options --channel=dev` | ||||||
|  | - Tag it as `homeassistant/xy-hassio-supervisor:latest` | ||||||
|  | - Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor` | ||||||
|  | - Test your changes | ||||||
|  |  | ||||||
|  | For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks. | ||||||
|   | |||||||
							
								
								
									
										52
									
								
								azure-pipelines-ci.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								azure-pipelines-ci.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | # https://dev.azure.com/home-assistant | ||||||
|  |  | ||||||
|  | trigger: | ||||||
|  |   batch: true | ||||||
|  |   branches: | ||||||
|  |     include: | ||||||
|  |       - master | ||||||
|  |       - dev | ||||||
|  | pr: | ||||||
|  |   - dev | ||||||
|  | variables: | ||||||
|  |   - name: versionHadolint | ||||||
|  |     value: "v1.16.3" | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   - job: "Tox" | ||||||
|  |     pool: | ||||||
|  |       vmImage: "ubuntu-latest" | ||||||
|  |     steps: | ||||||
|  |       - script: | | ||||||
|  |           sudo apt-get update | ||||||
|  |           sudo apt-get install -y libpulse0 libudev1 | ||||||
|  |         displayName: "Install Host library" | ||||||
|  |       - task: UsePythonVersion@0 | ||||||
|  |         displayName: "Use Python 3.7" | ||||||
|  |         inputs: | ||||||
|  |           versionSpec: "3.7" | ||||||
|  |       - script: pip install tox | ||||||
|  |         displayName: "Install Tox" | ||||||
|  |       - script: tox | ||||||
|  |         displayName: "Run Tox" | ||||||
|  |   - job: "JQ" | ||||||
|  |     pool: | ||||||
|  |       vmImage: "ubuntu-latest" | ||||||
|  |     steps: | ||||||
|  |       - script: sudo apt-get install -y jq | ||||||
|  |         displayName: "Install JQ" | ||||||
|  |       - bash: | | ||||||
|  |           shopt -s globstar | ||||||
|  |           cat **/*.json | jq '.' | ||||||
|  |         displayName: "Run JQ" | ||||||
|  |   - job: "Hadolint" | ||||||
|  |     pool: | ||||||
|  |       vmImage: "ubuntu-latest" | ||||||
|  |     steps: | ||||||
|  |       - script: sudo docker pull hadolint/hadolint:$(versionHadolint) | ||||||
|  |         displayName: "Install Hadolint" | ||||||
|  |       - script: | | ||||||
|  |           sudo docker run --rm -i \ | ||||||
|  |             -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \ | ||||||
|  |             hadolint/hadolint:$(versionHadolint) < Dockerfile | ||||||
|  |         displayName: "Run Hadolint" | ||||||
							
								
								
									
										53
									
								
								azure-pipelines-release.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								azure-pipelines-release.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,53 @@ | |||||||
|  | # https://dev.azure.com/home-assistant | ||||||
|  |  | ||||||
|  | trigger: | ||||||
|  |   batch: true | ||||||
|  |   branches: | ||||||
|  |     include: | ||||||
|  |       - dev | ||||||
|  |   tags: | ||||||
|  |     include: | ||||||
|  |       - "*" | ||||||
|  | pr: none | ||||||
|  | variables: | ||||||
|  |   - name: versionBuilder | ||||||
|  |     value: "7.0" | ||||||
|  |   - group: docker | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   - job: "VersionValidate" | ||||||
|  |     pool: | ||||||
|  |       vmImage: "ubuntu-latest" | ||||||
|  |     steps: | ||||||
|  |       - task: UsePythonVersion@0 | ||||||
|  |         displayName: "Use Python 3.7" | ||||||
|  |         inputs: | ||||||
|  |           versionSpec: "3.7" | ||||||
|  |       - script: | | ||||||
|  |           setup_version="$(python setup.py -V)" | ||||||
|  |           branch_version="$(Build.SourceBranchName)" | ||||||
|  |  | ||||||
|  |           if [ "${branch_version}" == "dev" ]; then | ||||||
|  |             exit 0 | ||||||
|  |           elif [ "${setup_version}" != "${branch_version}" ]; then | ||||||
|  |             echo "Version of tag ${branch_version} don't match with ${setup_version}!" | ||||||
|  |             exit 1 | ||||||
|  |           fi | ||||||
|  |         displayName: "Check version of branch/tag" | ||||||
|  |   - job: "Release" | ||||||
|  |     dependsOn: | ||||||
|  |       - "VersionValidate" | ||||||
|  |     pool: | ||||||
|  |       vmImage: "ubuntu-latest" | ||||||
|  |     steps: | ||||||
|  |       - script: sudo docker login -u $(dockerUser) -p $(dockerPassword) | ||||||
|  |         displayName: "Docker hub login" | ||||||
|  |       - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder) | ||||||
|  |         displayName: "Install Builder" | ||||||
|  |       - script: | | ||||||
|  |           sudo docker run --rm --privileged \ | ||||||
|  |             -v ~/.docker:/root/.docker \ | ||||||
|  |             -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \ | ||||||
|  |             homeassistant/amd64-builder:$(versionBuilder) \ | ||||||
|  |             --generic $(Build.SourceBranchName) --all -t /data | ||||||
|  |         displayName: "Build Release" | ||||||
							
								
								
									
										26
									
								
								azure-pipelines-wheels.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								azure-pipelines-wheels.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | |||||||
|  | # https://dev.azure.com/home-assistant | ||||||
|  |  | ||||||
|  | trigger: | ||||||
|  |   batch: true | ||||||
|  |   branches: | ||||||
|  |     include: | ||||||
|  |       - dev | ||||||
|  | pr: none | ||||||
|  | variables: | ||||||
|  |   - name: versionWheels | ||||||
|  |     value: '1.6.1-3.7-alpine3.11' | ||||||
|  | resources: | ||||||
|  |   repositories: | ||||||
|  |     - repository: azure | ||||||
|  |       type: github | ||||||
|  |       name: 'home-assistant/ci-azure' | ||||||
|  |       endpoint: 'home-assistant' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  | - template: templates/azp-job-wheels.yaml@azure | ||||||
|  |   parameters: | ||||||
|  |     builderVersion: '$(versionWheels)' | ||||||
|  |     builderApk: 'build-base;libffi-dev;openssl-dev' | ||||||
|  |     builderPip: 'Cython' | ||||||
|  |     wheelsRequirement: 'requirements.txt' | ||||||
							
								
								
									
										13
									
								
								build.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								build.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | |||||||
|  | { | ||||||
|  |   "image": "homeassistant/{arch}-hassio-supervisor", | ||||||
|  |   "build_from": { | ||||||
|  |     "aarch64": "homeassistant/aarch64-base-python:3.7-alpine3.11", | ||||||
|  |     "armhf": "homeassistant/armhf-base-python:3.7-alpine3.11", | ||||||
|  |     "armv7": "homeassistant/armv7-base-python:3.7-alpine3.11", | ||||||
|  |     "amd64": "homeassistant/amd64-base-python:3.7-alpine3.11", | ||||||
|  |     "i386": "homeassistant/i386-base-python:3.7-alpine3.11" | ||||||
|  |   }, | ||||||
|  |   "labels": { | ||||||
|  |     "io.hass.type": "supervisor" | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -1 +0,0 @@ | |||||||
| """Init file for HassIO.""" |  | ||||||
| @@ -1,57 +0,0 @@ | |||||||
| """Main file for HassIO.""" |  | ||||||
| import asyncio |  | ||||||
| from concurrent.futures import ThreadPoolExecutor |  | ||||||
| import logging |  | ||||||
| import sys |  | ||||||
|  |  | ||||||
| import hassio.bootstrap as bootstrap |  | ||||||
| import hassio.core as core |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def attempt_use_uvloop(): |  | ||||||
|     """Attempt to use uvloop.""" |  | ||||||
|     try: |  | ||||||
|         import uvloop |  | ||||||
|         asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) |  | ||||||
|     except ImportError: |  | ||||||
|         pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # pylint: disable=invalid-name |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     bootstrap.initialize_logging() |  | ||||||
|     attempt_use_uvloop() |  | ||||||
|     loop = asyncio.get_event_loop() |  | ||||||
|  |  | ||||||
|     if not bootstrap.check_environment(): |  | ||||||
|         sys.exit(1) |  | ||||||
|  |  | ||||||
|     # init executor pool |  | ||||||
|     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker") |  | ||||||
|     loop.set_default_executor(executor) |  | ||||||
|  |  | ||||||
|     _LOGGER.info("Initialize Hassio setup") |  | ||||||
|     config = bootstrap.initialize_system_data() |  | ||||||
|     hassio = core.HassIO(loop, config) |  | ||||||
|  |  | ||||||
|     bootstrap.migrate_system_env(config) |  | ||||||
|  |  | ||||||
|     _LOGGER.info("Setup HassIO") |  | ||||||
|     loop.run_until_complete(hassio.setup()) |  | ||||||
|  |  | ||||||
|     loop.call_soon_threadsafe(loop.create_task, hassio.start()) |  | ||||||
|     loop.call_soon_threadsafe(bootstrap.reg_signal, loop) |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         _LOGGER.info("Run HassIO") |  | ||||||
|         loop.run_forever() |  | ||||||
|     finally: |  | ||||||
|         _LOGGER.info("Stopping HassIO") |  | ||||||
|         loop.run_until_complete(hassio.stop()) |  | ||||||
|         executor.shutdown(wait=False) |  | ||||||
|         loop.close() |  | ||||||
|  |  | ||||||
|     _LOGGER.info("Close Hassio") |  | ||||||
|     sys.exit(0) |  | ||||||
| @@ -1,133 +0,0 @@ | |||||||
| """Init file for HassIO addons.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| from .addon import Addon |  | ||||||
| from .repository import Repository |  | ||||||
| from .data import Data |  | ||||||
| from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AddonManager(object): |  | ||||||
|     """Manage addons inside HassIO.""" |  | ||||||
|  |  | ||||||
|     def __init__(self, config, loop, docker): |  | ||||||
|         """Initialize docker base wrapper.""" |  | ||||||
|         self.loop = loop |  | ||||||
|         self.config = config |  | ||||||
|         self.docker = docker |  | ||||||
|         self.data = Data(config) |  | ||||||
|         self.addons = {} |  | ||||||
|         self.repositories = {} |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def list_addons(self): |  | ||||||
|         """Return a list of all addons.""" |  | ||||||
|         return list(self.addons.values()) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def list_repositories(self): |  | ||||||
|         """Return list of addon repositories.""" |  | ||||||
|         return list(self.repositories.values()) |  | ||||||
|  |  | ||||||
|     def get(self, addon_slug): |  | ||||||
|         """Return a adddon from slug.""" |  | ||||||
|         return self.addons.get(addon_slug) |  | ||||||
|  |  | ||||||
|     async def prepare(self): |  | ||||||
|         """Startup addon management.""" |  | ||||||
|         self.data.reload() |  | ||||||
|  |  | ||||||
|         # init hassio built-in repositories |  | ||||||
|         repositories = \ |  | ||||||
|             set(self.config.addons_repositories) | BUILTIN_REPOSITORIES |  | ||||||
|  |  | ||||||
|         # init custom repositories & load addons |  | ||||||
|         await self.load_repositories(repositories) |  | ||||||
|  |  | ||||||
|     async def reload(self): |  | ||||||
|         """Update addons from repo and reload list.""" |  | ||||||
|         tasks = [repository.update() for repository in |  | ||||||
|                  self.repositories.values()] |  | ||||||
|         if tasks: |  | ||||||
|             await asyncio.wait(tasks, loop=self.loop) |  | ||||||
|  |  | ||||||
|         # read data from repositories |  | ||||||
|         self.data.reload() |  | ||||||
|  |  | ||||||
|         # update addons |  | ||||||
|         await self.load_addons() |  | ||||||
|  |  | ||||||
|     async def load_repositories(self, list_repositories): |  | ||||||
|         """Add a new custom repository.""" |  | ||||||
|         new_rep = set(list_repositories) |  | ||||||
|         old_rep = set(self.repositories) |  | ||||||
|  |  | ||||||
|         # add new repository |  | ||||||
|         async def _add_repository(url): |  | ||||||
|             """Helper function to async add repository.""" |  | ||||||
|             repository = Repository(self.config, self.loop, self.data, url) |  | ||||||
|             if not await repository.load(): |  | ||||||
|                 _LOGGER.error("Can't load from repository %s", url) |  | ||||||
|                 return |  | ||||||
|             self.repositories[url] = repository |  | ||||||
|  |  | ||||||
|             # don't add built-in repository to config |  | ||||||
|             if url not in BUILTIN_REPOSITORIES: |  | ||||||
|                 self.config.add_addon_repository(url) |  | ||||||
|  |  | ||||||
|         tasks = [_add_repository(url) for url in new_rep - old_rep] |  | ||||||
|         if tasks: |  | ||||||
|             await asyncio.wait(tasks, loop=self.loop) |  | ||||||
|  |  | ||||||
|         # del new repository |  | ||||||
|         for url in old_rep - new_rep - BUILTIN_REPOSITORIES: |  | ||||||
|             self.repositories.pop(url).remove() |  | ||||||
|             self.config.drop_addon_repository(url) |  | ||||||
|  |  | ||||||
|         # update data |  | ||||||
|         self.data.reload() |  | ||||||
|         await self.load_addons() |  | ||||||
|  |  | ||||||
|     async def load_addons(self): |  | ||||||
|         """Update/add internal addon store.""" |  | ||||||
|         all_addons = set(self.data.system) | set(self.data.cache) |  | ||||||
|  |  | ||||||
|         # calc diff |  | ||||||
|         add_addons = all_addons - set(self.addons) |  | ||||||
|         del_addons = set(self.addons) - all_addons |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Load addons: %d all - %d new - %d remove", |  | ||||||
|                      len(all_addons), len(add_addons), len(del_addons)) |  | ||||||
|  |  | ||||||
|         # new addons |  | ||||||
|         tasks = [] |  | ||||||
|         for addon_slug in add_addons: |  | ||||||
|             addon = Addon( |  | ||||||
|                 self.config, self.loop, self.docker, self.data, addon_slug) |  | ||||||
|  |  | ||||||
|             tasks.append(addon.load()) |  | ||||||
|             self.addons[addon_slug] = addon |  | ||||||
|  |  | ||||||
|         if tasks: |  | ||||||
|             await asyncio.wait(tasks, loop=self.loop) |  | ||||||
|  |  | ||||||
|         # remove |  | ||||||
|         for addon_slug in del_addons: |  | ||||||
|             self.addons.pop(addon_slug) |  | ||||||
|  |  | ||||||
|     async def auto_boot(self, stage): |  | ||||||
|         """Boot addons with mode auto.""" |  | ||||||
|         tasks = [] |  | ||||||
|         for addon in self.addons.values(): |  | ||||||
|             if addon.is_installed and addon.boot == BOOT_AUTO and \ |  | ||||||
|                     addon.startup == stage: |  | ||||||
|                 tasks.append(addon.start()) |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Startup %s run %d addons", stage, len(tasks)) |  | ||||||
|         if tasks: |  | ||||||
|             await asyncio.wait(tasks, loop=self.loop) |  | ||||||
| @@ -1,715 +0,0 @@ | |||||||
| """Init file for HassIO addons.""" |  | ||||||
| from copy import deepcopy |  | ||||||
| import logging |  | ||||||
| import json |  | ||||||
| from pathlib import Path, PurePath |  | ||||||
| import re |  | ||||||
| import shutil |  | ||||||
| import tarfile |  | ||||||
| from tempfile import TemporaryDirectory |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
| from voluptuous.humanize import humanize_error |  | ||||||
|  |  | ||||||
| from .validate import ( |  | ||||||
|     validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME) |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP, |  | ||||||
|     ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY, |  | ||||||
|     ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT, |  | ||||||
|     ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID, |  | ||||||
|     STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM, |  | ||||||
|     ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI, |  | ||||||
|     ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT, |  | ||||||
|     ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC, |  | ||||||
|     ATTR_HOST_DBUS, ATTR_AUTO_UART) |  | ||||||
| from .util import check_installed |  | ||||||
| from ..dock.addon import DockerAddon |  | ||||||
| from ..tools import write_json_file, read_json_file |  | ||||||
|  |  | ||||||
_LOGGER = logging.getLogger(__name__)

# Parses the add-on `webui` template string. It matches either a literal
# scheme ("http"/"https" -> s_prefix) or an option placeholder
# "[PROTO:<option>]" (-> t_proto), followed by "://[HOST]:[PORT:<port>]"
# (container port -> t_port) and an optional trailing path (-> s_suffix).
RE_WEBUI = re.compile(
    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
|  |  | ||||||
|  |  | ||||||
class Addon(object):
    """Hold data for an add-on inside HassIO.

    Wraps the raw repository/user configuration stored in ``data`` and
    drives the matching docker container through a ``DockerAddon`` object.
    """

    def __init__(self, config, loop, docker, data, slug):
        """Initialize data holder."""
        self.loop = loop
        self.config = config
        self.data = data
        self._id = slug

        self.docker = DockerAddon(config, loop, docker, self)

    async def load(self):
        """Async initialize of object."""
        if self.is_installed:
            await self.docker.attach()

    @property
    def slug(self):
        """Return slug/id of addon."""
        return self._id

    @property
    def _mesh(self):
        """Return addon data from system or cache.

        Installed add-ons live in ``data.system``; not-yet-installed ones
        only in ``data.cache``.
        """
        return self.data.system.get(self._id, self.data.cache.get(self._id))

    @property
    def is_installed(self):
        """Return True if the add-on is installed."""
        return self._id in self.data.system

    @property
    def is_detached(self):
        """Return True if addon is detached (repository no longer known)."""
        return self._id not in self.data.cache

    @property
    def version_installed(self):
        """Return installed version or None if not installed."""
        return self.data.user.get(self._id, {}).get(ATTR_VERSION)

    def _set_install(self, version):
        """Set addon as installed."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.data.save()

    def _set_uninstall(self):
        """Set addon as uninstalled."""
        self.data.system.pop(self._id, None)
        self.data.user.pop(self._id, None)
        self.data.save()

    def _set_update(self, version):
        """Update version of addon."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id][ATTR_VERSION] = version
        self.data.save()

    def _restore_data(self, user, system):
        """Restore user/system data of addon from a snapshot."""
        self.data.user[self._id] = deepcopy(user)
        self.data.system[self._id] = deepcopy(system)
        self.data.save()

    @property
    def options(self):
        """Return options with local changes.

        User options override the defaults shipped with the add-on.
        """
        if self.is_installed:
            return {
                **self.data.system[self._id][ATTR_OPTIONS],
                **self.data.user[self._id][ATTR_OPTIONS]
            }
        return self.data.cache[self._id][ATTR_OPTIONS]

    @options.setter
    def options(self, value):
        """Store user addon options."""
        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
        self.data.save()

    @property
    def boot(self):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_BOOT]
        return self._mesh[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Store user boot options."""
        self.data.user[self._id][ATTR_BOOT] = value
        self.data.save()

    @property
    def auto_update(self):
        """Return if auto update is enabled, or None if never set."""
        if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_AUTO_UPDATE]
        return None

    @auto_update.setter
    def auto_update(self, value):
        """Set auto update."""
        self.data.user[self._id][ATTR_AUTO_UPDATE] = value
        self.data.save()

    @property
    def name(self):
        """Return name of addon."""
        return self._mesh[ATTR_NAME]

    @property
    def timeout(self):
        """Return timeout of addon for docker stop."""
        return self._mesh[ATTR_TIMEOUT]

    @property
    def api_token(self):
        """Return the API token for this add-on, or None if not installed."""
        if self.is_installed:
            return self.data.user[self._id][ATTR_UUID]
        return None

    @property
    def description(self):
        """Return description of addon."""
        return self._mesh[ATTR_DESCRIPTON]

    @property
    def repository(self):
        """Return repository of addon."""
        return self._mesh[ATTR_REPOSITORY]

    @property
    def last_version(self):
        """Return latest available version of addon."""
        if self._id in self.data.cache:
            return self.data.cache[self._id][ATTR_VERSION]
        # detached addon: the installed version is the only one we know
        return self.version_installed

    @property
    def startup(self):
        """Return startup type of addon."""
        return self._mesh.get(ATTR_STARTUP)

    @property
    def ports(self):
        """Return port mapping of addon or None (host network / no ports)."""
        if self.host_network or ATTR_PORTS not in self._mesh:
            return None

        if not self.is_installed or \
                ATTR_NETWORK not in self.data.user[self._id]:
            return self._mesh[ATTR_PORTS]
        return self.data.user[self._id][ATTR_NETWORK]

    @ports.setter
    def ports(self, value):
        """Set custom ports of addon; None resets to defaults."""
        if value is None:
            self.data.user[self._id].pop(ATTR_NETWORK, None)
        else:
            # only accept container ports the add-on actually declares
            new_ports = {}
            for container_port, host_port in value.items():
                if container_port in self._mesh.get(ATTR_PORTS, {}):
                    new_ports[container_port] = host_port

            self.data.user[self._id][ATTR_NETWORK] = new_ports

        self.data.save()

    @property
    def webui(self):
        """Return URL to webui or None."""
        if ATTR_WEBUI not in self._mesh:
            return None
        webui = RE_WEBUI.match(self._mesh[ATTR_WEBUI])
        if not webui:
            # bug fix: a malformed template previously raised AttributeError
            _LOGGER.warning("Can't parse webui template for %s", self._id)
            return None

        # extract arguments
        t_port = webui.group('t_port')
        t_proto = webui.group('t_proto')
        s_prefix = webui.group('s_prefix') or ""
        s_suffix = webui.group('s_suffix') or ""

        # search host port for this docker port
        if self.ports is None:
            port = t_port
        else:
            port = self.ports.get("{}/tcp".format(t_port), t_port)

        # for interface config or port lists
        if isinstance(port, (tuple, list)):
            port = port[-1]

        # lookup the correct protocol from config
        if t_proto:
            proto = 'https' if self.options[t_proto] else 'http'
        else:
            proto = s_prefix

        return "{}://[HOST]:{}{}".format(proto, port, s_suffix)

    @property
    def host_network(self):
        """Return True if addon run on host network."""
        return self._mesh[ATTR_HOST_NETWORK]

    @property
    def host_ipc(self):
        """Return True if addon run on host IPC namespace."""
        return self._mesh[ATTR_HOST_IPC]

    @property
    def host_dbus(self):
        """Return True if addon run on host DBUS."""
        return self._mesh[ATTR_HOST_DBUS]

    @property
    def devices(self):
        """Return devices of addon."""
        return self._mesh.get(ATTR_DEVICES)

    @property
    def auto_uart(self):
        """Return True if we should map all uart device."""
        return self._mesh.get(ATTR_AUTO_UART)

    @property
    def tmpfs(self):
        """Return tmpfs of addon."""
        return self._mesh.get(ATTR_TMPFS)

    @property
    def environment(self):
        """Return environment of addon."""
        return self._mesh.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self):
        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)

    @property
    def legacy(self):
        """Return True if the add-on doesn't support hass labels."""
        return self._mesh.get(ATTR_LEGACY)

    @property
    def access_hassio_api(self):
        """Return True if the add-on has access to the hassio api."""
        return self._mesh[ATTR_HASSIO_API]

    @property
    def access_homeassistant_api(self):
        """Return True if the add-on has access to the Home-Assistant api proxy."""
        return self._mesh[ATTR_HOMEASSISTANT_API]

    @property
    def with_stdin(self):
        """Return True if the add-on accepts stdin input."""
        return self._mesh[ATTR_STDIN]

    @property
    def with_gpio(self):
        """Return True if the add-on has access to the gpio interface."""
        return self._mesh[ATTR_GPIO]

    @property
    def with_audio(self):
        """Return True if the add-on has access to audio."""
        return self._mesh[ATTR_AUDIO]

    @property
    def audio_output(self):
        """Return ALSA config for output or None."""
        if not self.with_audio:
            return None

        setting = self.config.audio_output
        if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
            setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
        return setting

    @audio_output.setter
    def audio_output(self, value):
        """Set/remove custom audio output settings."""
        if value is None:
            self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
        else:
            self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
        self.data.save()

    @property
    def audio_input(self):
        """Return ALSA config for input or None."""
        if not self.with_audio:
            # explicit None for consistency with audio_output
            return None

        setting = self.config.audio_input
        if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
            setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
        return setting

    @audio_input.setter
    def audio_input(self, value):
        """Set/remove custom audio input settings."""
        if value is None:
            self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
        else:
            self.data.user[self._id][ATTR_AUDIO_INPUT] = value
        self.data.save()

    @property
    def url(self):
        """Return url of addon."""
        return self._mesh.get(ATTR_URL)

    @property
    def with_logo(self):
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def with_changelog(self):
        """Return True if a changelog exists."""
        return self.path_changelog.exists()

    @property
    def supported_arch(self):
        """Return list of supported arch."""
        return self._mesh[ATTR_ARCH]

    @property
    def image(self):
        """Return image name of addon."""
        addon_data = self._mesh

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.config.arch,
            addon_data[ATTR_SLUG])

    @property
    def need_build(self):
        """Return True if this addon needs a local build."""
        return ATTR_IMAGE not in self._mesh

    @property
    def map_volumes(self):
        """Return a dict of {volume: policy} from addon."""
        volumes = {}
        for volume in self._mesh[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            # default access policy is read-only
            volumes[result.group(1)] = result.group(2) or 'ro'

        return volumes

    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, self._id)

    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, self._id)

    @property
    def path_options(self):
        """Return path to addons options."""
        return Path(self.path_data, "options.json")

    @property
    def path_location(self):
        """Return path to this addon."""
        return Path(self._mesh[ATTR_LOCATON])

    @property
    def path_logo(self):
        """Return path to addon logo."""
        return Path(self.path_location, 'logo.png')

    @property
    def path_changelog(self):
        """Return path to addon changelog."""
        return Path(self.path_location, 'CHANGELOG.md')

    def write_options(self):
        """Validate and write addon options; return True on success."""
        schema = self.schema
        options = self.options

        try:
            schema(options)
            return write_json_file(self.path_options, options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", self._id,
                          humanize_error(options, ex))

        return False

    @property
    def schema(self):
        """Create a voluptuous schema for addon options."""
        raw_schema = self._mesh[ATTR_SCHEMA]

        # a boolean schema means validation is disabled
        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    def test_udpate_schema(self):
        """Check if the existing config stays valid after an update.

        NOTE: the typo in the method name is kept for API compatibility.
        """
        if not self.is_installed or self.is_detached:
            return True

        # load next schema
        new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
        default_options = self.data.cache[self._id][ATTR_OPTIONS]

        # if disabled
        if isinstance(new_raw_schema, bool):
            return True

        # merge options
        options = {
            **self.data.user[self._id][ATTR_OPTIONS],
            **default_options,
        }

        # create voluptuous
        new_schema = \
            vol.Schema(vol.All(dict, validate_options(new_raw_schema)))

        # validate
        try:
            new_schema(options)
        except vol.Invalid:
            return False
        return True

    async def install(self):
        """Install an addon; return True on success."""
        if self.config.arch not in self.supported_arch:
            _LOGGER.error(
                "Addon %s not supported on %s", self._id, self.config.arch)
            return False

        if self.is_installed:
            _LOGGER.error("Addon %s is already installed", self._id)
            return False

        if not self.path_data.is_dir():
            _LOGGER.info(
                "Create Home-Assistant addon data folder %s", self.path_data)
            self.path_data.mkdir()

        if not await self.docker.install(self.last_version):
            return False

        self._set_install(self.last_version)
        return True

    @check_installed
    async def uninstall(self):
        """Remove an addon; return True on success."""
        if not await self.docker.remove():
            return False

        if self.path_data.is_dir():
            _LOGGER.info(
                "Remove Home-Assistant addon data folder %s", self.path_data)
            shutil.rmtree(str(self.path_data))

        self._set_uninstall()
        return True

    async def state(self):
        """Return running state of addon."""
        if not self.is_installed:
            return STATE_NONE

        if await self.docker.is_running():
            return STATE_STARTED
        return STATE_STOPPED

    @check_installed
    def start(self):
        """Set options and start addon.

        Return a coroutine.
        """
        return self.docker.run()

    @check_installed
    def stop(self):
        """Stop addon.

        Return a coroutine.
        """
        return self.docker.stop()

    @check_installed
    async def update(self):
        """Update addon; return True on success."""
        last_state = await self.state()

        if self.last_version == self.version_installed:
            _LOGGER.warning(
                "No update available for Addon %s", self._id)
            return False

        if not await self.docker.update(self.last_version):
            return False
        self._set_update(self.last_version)

        # restore state
        if last_state == STATE_STARTED:
            await self.docker.run()
        return True

    @check_installed
    def restart(self):
        """Restart addon.

        Return a coroutine.
        """
        return self.docker.restart()

    @check_installed
    def logs(self):
        """Return addons log output.

        Return a coroutine.
        """
        return self.docker.logs()

    @check_installed
    async def rebuild(self):
        """Perform a rebuild of a locally built addon."""
        last_state = await self.state()

        if not self.need_build:
            _LOGGER.error("Can't rebuild a none local build addon!")
            return False

        # remove docker container but not addon config
        if not await self.docker.remove():
            return False

        if not await self.docker.install(self.version_installed):
            return False

        # restore state
        if last_state == STATE_STARTED:
            await self.docker.run()
        return True

    @check_installed
    async def write_stdin(self, data):
        """Write data to add-on stdin.

        Return a coroutine.
        """
        if not self.with_stdin:
            _LOGGER.error("Add-on don't support write to stdin!")
            return False

        return await self.docker.write_stdin(data)

    @check_installed
    async def snapshot(self, tar_file):
        """Snapshot the state of an addon into ``tar_file``."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # store local image
            if self.need_build and not await \
                    self.docker.export_image(Path(temp, "image.tar")):
                return False

            data = {
                ATTR_USER: self.data.user.get(self._id, {}),
                ATTR_SYSTEM: self.data.system.get(self._id, {}),
                ATTR_VERSION: self.version_installed,
                ATTR_STATE: await self.state(),
            }

            # store local configs/state
            if not write_json_file(Path(temp, "addon.json"), data):
                _LOGGER.error("Can't write addon.json for %s", self._id)
                return False

            # write into tarfile
            def _create_tar():
                """Write tar inside loop."""
                with tarfile.open(tar_file, "w:gz",
                                  compresslevel=1) as snapshot:
                    snapshot.add(temp, arcname=".")
                    snapshot.add(self.path_data, arcname="data")

            try:
                _LOGGER.info("Build snapshot for addon %s", self._id)
                await self.loop.run_in_executor(None, _create_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
                return False

        _LOGGER.info("Finish snapshot for addon %s", self._id)
        return True

    async def restore(self, tar_file):
        """Restore the state of an addon from ``tar_file``."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # extract snapshot
            def _extract_tar():
                """Extract tar snapshot."""
                with tarfile.open(tar_file, "r:gz") as snapshot:
                    snapshot.extractall(path=Path(temp))

            try:
                await self.loop.run_in_executor(None, _extract_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
                return False

            # read snapshot data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except (OSError, json.JSONDecodeError) as err:
                _LOGGER.error("Can't read addon.json -> %s", err)
                # bug fix: previously fell through with `data` unbound,
                # raising NameError at the validation step below
                return False

            # validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
            except vol.Invalid as err:
                _LOGGER.error("Can't validate %s, snapshot data -> %s",
                              self._id, humanize_error(data, err))
                return False

            # restore data / reload addon
            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])

            # check version / restore image
            version = data[ATTR_VERSION]
            if version != self.docker.version:
                image_file = Path(temp, "image.tar")
                if image_file.is_file():
                    await self.docker.import_image(image_file, version)
                else:
                    if await self.docker.install(version):
                        await self.docker.cleanup()
            else:
                await self.docker.stop()

            # restore data
            def _restore_data():
                """Restore data."""
                if self.path_data.is_dir():
                    shutil.rmtree(str(self.path_data), ignore_errors=True)
                shutil.copytree(str(Path(temp, "data")), str(self.path_data))

            try:
                _LOGGER.info("Restore data for addon %s", self._id)
                await self.loop.run_in_executor(None, _restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data -> %s", err)
                return False

            # run addon
            if data[ATTR_STATE] == STATE_STARTED:
                return await self.start()

        _LOGGER.info("Finish restore for addon %s", self._id)
        return True
| @@ -1,65 +0,0 @@ | |||||||
| """HassIO addons build environment.""" |  | ||||||
| from pathlib import Path |  | ||||||
|  |  | ||||||
| from .validate import SCHEMA_BUILD_CONFIG |  | ||||||
| from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON |  | ||||||
| from ..tools import JsonConfig |  | ||||||
|  |  | ||||||
|  |  | ||||||
class AddonBuild(JsonConfig):
    """Handle build options for addons.

    Reads the addon's optional ``build.json`` and exposes the settings
    needed to assemble a ``docker build`` call.
    """

    def __init__(self, config, addon):
        """Initialize addon builder for *addon*."""
        self.config = config
        self.addon = addon

        super().__init__(
            Path(addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)

    def save(self):
        """Ignore save function; build options are read-only."""

    @property
    def base_image(self):
        """Base images for this addon."""
        return self._data[ATTR_BUILD_FROM][self.config.arch]

    @property
    def squash(self):
        """Return True or False if squash is active."""
        return self._data[ATTR_SQUASH]

    @property
    def additional_args(self):
        """Return additional docker build arguments."""
        return self._data[ATTR_ARGS]

    def get_docker_args(self, version):
        """Create a dict with docker build arguments."""
        labels = {
            'io.hass.version': version,
            'io.hass.arch': self.config.arch,
            'io.hass.type': META_ADDON,
            'io.hass.name': self.addon.name,
            'io.hass.description': self.addon.description,
        }
        if self.addon.url:
            labels['io.hass.url'] = self.addon.url

        # Addon-supplied args may override the standard BUILD_* values.
        build_args = {
            'BUILD_FROM': self.base_image,
            'BUILD_VERSION': version,
            'BUILD_ARCH': self.config.arch,
        }
        build_args.update(self.additional_args)

        return {
            'path': str(self.addon.path_location),
            'tag': "{}:{}".format(self.addon.image, version),
            'pull': True,
            'forcerm': True,
            'squash': self.squash,
            'labels': labels,
            'buildargs': build_args,
        }
| @@ -1,12 +0,0 @@ | |||||||
| { |  | ||||||
|     "local": { |  | ||||||
|         "name": "Local Add-Ons", |  | ||||||
|         "url": "https://home-assistant.io/hassio", |  | ||||||
        "maintainer": "By ourselves"
|     }, |  | ||||||
|     "core": { |  | ||||||
|         "name": "Built-in Add-Ons", |  | ||||||
|         "url": "https://home-assistant.io/addons", |  | ||||||
|         "maintainer": "Home Assistant authors" |  | ||||||
|     } |  | ||||||
| } |  | ||||||
| @@ -1,161 +0,0 @@ | |||||||
| """Init file for HassIO addons.""" |  | ||||||
| import copy |  | ||||||
| import logging |  | ||||||
| import json |  | ||||||
| from pathlib import Path |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
| from voluptuous.humanize import humanize_error |  | ||||||
|  |  | ||||||
| from .util import extract_hash_from_path |  | ||||||
| from .validate import ( |  | ||||||
|     SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG) |  | ||||||
| from ..const import ( |  | ||||||
|     FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON, |  | ||||||
|     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM) |  | ||||||
| from ..tools import JsonConfig, read_json_file |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class Data(JsonConfig):
    """Hold data for addons inside HassIO.

    Persists per-addon user/system data to FILE_HASSIO_ADDONS and keeps
    in-memory maps of addon configs and repository metadata read from
    the repository folders on disk.
    """

    def __init__(self, config):
        """Initialize data holder."""
        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
        self.config = config
        # repository slug -> repository metadata (from repository.json)
        self._repositories = {}
        # addon slug -> validated content of the addon's config.json
        self._cache = {}

    @property
    def user(self):
        """Return local addon user data."""
        return self._data[ATTR_USER]

    @property
    def system(self):
        """Return local addon data."""
        return self._data[ATTR_SYSTEM]

    @property
    def cache(self):
        """Return addon data from cache/repositories."""
        return self._cache

    @property
    def repositories(self):
        """Return addon data from repositories."""
        return self._repositories

    def reload(self):
        """Read data from addons repository.

        Rebuilds the cache and repository maps from disk, then merges
        refreshed configs into the persisted system data.
        """
        self._cache = {}
        self._repositories = {}

        # read core repository
        self._read_addons_folder(
            self.config.path_addons_core, REPOSITORY_CORE)

        # read local repository
        self._read_addons_folder(
            self.config.path_addons_local, REPOSITORY_LOCAL)

        # add built-in repositories information
        self._set_builtin_repositories()

        # read custom git repositories
        for repository_element in self.config.path_addons_git.iterdir():
            if repository_element.is_dir():
                self._read_git_repository(repository_element)

        # update local data
        self._merge_config()

    def _read_git_repository(self, path):
        """Process a custom repository folder.

        Reads repository.json; a missing or invalid file skips the
        whole repository (logged as a warning, not an error).
        """
        slug = extract_hash_from_path(path)

        # exists repository json
        repository_file = Path(path, "repository.json")
        try:
            repository_info = SCHEMA_REPOSITORY_CONFIG(
                read_json_file(repository_file)
            )

        except (OSError, json.JSONDecodeError):
            _LOGGER.warning("Can't read repository information from %s",
                            repository_file)
            return

        except vol.Invalid:
            _LOGGER.warning("Repository parse error %s", repository_file)
            return

        # process data
        self._repositories[slug] = repository_info
        self._read_addons_folder(path, slug)

    def _read_addons_folder(self, path, repository):
        """Read data from addons folder.

        Every config.json found below *path* is validated and stored in
        the cache under the slug "<repository>_<addon-slug>".
        """
        for addon in path.glob("**/config.json"):
            try:
                addon_config = read_json_file(addon)

                # validate
                addon_config = SCHEMA_ADDON_CONFIG(addon_config)

                # Generate slug
                addon_slug = "{}_{}".format(
                    repository, addon_config[ATTR_SLUG])

                # store
                addon_config[ATTR_REPOSITORY] = repository
                addon_config[ATTR_LOCATON] = str(addon.parent)
                self._cache[addon_slug] = addon_config

            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", addon)

            except vol.Invalid as ex:
                _LOGGER.warning("Can't read %s -> %s", addon,
                                humanize_error(addon_config, ex))

    def _set_builtin_repositories(self):
        """Add local built-in repository into dataset.

        The metadata ships with this package as built-in.json next to
        this module.
        """
        try:
            builtin_file = Path(__file__).parent.joinpath('built-in.json')
            builtin_data = read_json_file(builtin_file)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.warning("Can't read built-in.json -> %s", err)
            return

        # core repository
        self._repositories[REPOSITORY_CORE] = \
            builtin_data[REPOSITORY_CORE]

        # local repository
        self._repositories[REPOSITORY_LOCAL] = \
            builtin_data[REPOSITORY_LOCAL]

    def _merge_config(self):
        """Update local config if it has changed upstream.

        The cached config is only merged when its version equals the
        installed version; differing versions are handled by a real
        addon update elsewhere.
        """
        have_change = False

        for addon in set(self.system):
            # detached addons (no longer in any repository) are kept as-is
            if addon not in self._cache:
                continue

            cache = self._cache[addon]
            data = self.system[addon]
            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
                if data != cache:
                    self.system[addon] = copy.deepcopy(cache)
                    have_change = True

        # persist only when something actually changed
        if have_change:
            self.save()
| @@ -1,110 +0,0 @@ | |||||||
| """Init file for HassIO addons git.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
| import functools as ft |  | ||||||
| from pathlib import Path |  | ||||||
| import shutil |  | ||||||
|  |  | ||||||
| import git |  | ||||||
|  |  | ||||||
| from .util import get_hash_from_repository |  | ||||||
| from ..const import URL_HASSIO_ADDONS |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class GitRepo(object):
    """Manage an addons git repository checkout on disk.

    All git operations run in an executor thread and are serialized
    through an asyncio lock.
    """

    def __init__(self, config, loop, path, url):
        """Initialize git base wrapper."""
        self.config = config
        self.loop = loop
        self.repo = None
        self.path = path
        self.url = url
        self._lock = asyncio.Lock(loop=loop)

    async def load(self):
        """Init git addon repo; clone first if the checkout is missing."""
        if not self.path.is_dir():
            return await self.clone()

        async with self._lock:
            _LOGGER.info("Load addon %s repository", self.path)
            try:
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo, str(self.path))
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as exception:
                _LOGGER.error("Can't load %s repo: %s.", self.path, exception)
                return False

        return True

    async def clone(self):
        """Clone git addon repo."""
        async with self._lock:
            _LOGGER.info("Clone addon %s repository", self.url)
            clone_call = ft.partial(
                git.Repo.clone_from, self.url, str(self.path),
                recursive=True)
            try:
                self.repo = await self.loop.run_in_executor(None, clone_call)
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as exception:
                _LOGGER.error("Can't clone %s repo: %s.", self.url, exception)
                return False

        return True

    async def pull(self):
        """Pull git addon repo; refuse to overlap a running operation."""
        if self._lock.locked():
            _LOGGER.warning("It is already a task in progress.")
            return False

        async with self._lock:
            _LOGGER.info("Pull addon %s repository", self.url)
            try:
                await self.loop.run_in_executor(
                    None, self.repo.remotes.origin.pull)
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as exception:
                _LOGGER.error("Can't pull %s repo: %s.", self.url, exception)
                return False

        return True
|  |  | ||||||
|  |  | ||||||
class GitRepoHassIO(GitRepo):
    """Checkout of the official hass.io addons repository."""

    def __init__(self, config, loop):
        """Initialize git hassio addon repository."""
        checkout_path = config.path_addons_core
        super().__init__(config, loop, checkout_path, URL_HASSIO_ADDONS)
|  |  | ||||||
|  |  | ||||||
class GitRepoCustom(GitRepo):
    """Checkout of a user-added custom addons repository.

    The checkout lives in a folder named after a short hash of the
    repository URL.
    """

    def __init__(self, config, loop, url):
        """Initialize git custom addon repository."""
        repo_path = Path(config.path_addons_git, get_hash_from_repository(url))
        super().__init__(config, loop, repo_path, url)

    def remove(self):
        """Remove a custom addon."""
        if not self.path.is_dir():
            return

        _LOGGER.info("Remove custom addon repository %s", self.url)

        def log_err(funct, path, _):
            """Log error."""
            _LOGGER.warning("Can't remove %s", path)

        # best-effort delete: log failing entries instead of raising
        shutil.rmtree(str(self.path), onerror=log_err)
| @@ -1,71 +0,0 @@ | |||||||
| """Represent a HassIO repository.""" |  | ||||||
| from .git import GitRepoHassIO, GitRepoCustom |  | ||||||
| from .util import get_hash_from_repository |  | ||||||
| from ..const import ( |  | ||||||
|     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER) |  | ||||||
|  |  | ||||||
| UNKNOWN = 'unknown' |  | ||||||
|  |  | ||||||
|  |  | ||||||
class Repository(object):
    """Repository in HassIO.

    Wraps one addon source: the local folder, the built-in core
    repository, or a custom git repository identified by URL.
    """

    def __init__(self, config, loop, data, repository):
        """Initialize repository object.

        *repository* is either a built-in repository constant or the
        URL of a custom git repository.
        """
        self.data = data
        self.source = None
        self.git = None

        if repository == REPOSITORY_LOCAL:
            self._id = repository
        elif repository == REPOSITORY_CORE:
            self._id = repository
            self.git = GitRepoHassIO(config, loop)
        else:
            # custom repository: slug is a short hash of the URL
            self._id = get_hash_from_repository(repository)
            self.git = GitRepoCustom(config, loop, repository)
            self.source = repository

    @property
    def _mesh(self):
        """Return data struct repository."""
        return self.data.repositories.get(self._id, {})

    @property
    def slug(self):
        """Return slug of repository."""
        return self._id

    @property
    def name(self):
        """Return name of repository."""
        return self._mesh.get(ATTR_NAME, UNKNOWN)

    @property
    def url(self):
        """Return url of repository."""
        return self._mesh.get(ATTR_URL, self.source)

    @property
    def maintainer(self):
        """Return maintainer of repository."""
        return self._mesh.get(ATTR_MAINTAINER, UNKNOWN)

    async def load(self):
        """Load addon repository."""
        if self.git:
            return await self.git.load()
        return True

    async def update(self):
        """Update addon repository."""
        if self.git:
            return await self.git.pull()
        return True

    def remove(self):
        """Remove addon repository.

        Raises RuntimeError for the built-in repositories, which must
        never be deleted.
        """
        if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
            raise RuntimeError("Can't remove built-in repositories!")

        self.git.remove()
| @@ -1,35 +0,0 @@ | |||||||
| """Util addons functions.""" |  | ||||||
| import hashlib |  | ||||||
| import logging |  | ||||||
| import re |  | ||||||
|  |  | ||||||
# Exactly eight lowercase hex characters -- the short-hash form produced
# by get_hash_from_repository().  Anchored at both ends so a directory
# name that merely *starts* with 8 hex chars is not mistaken for a hash.
RE_SHA1 = re.compile(r"^[a-f0-9]{8}$")

_LOGGER = logging.getLogger(__name__)


def get_hash_from_repository(name):
    """Generate a short, stable hash from a repository name/URL.

    Case-insensitive: the name is lowercased before hashing.
    """
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def extract_hash_from_path(path):
    """Extract repo id from path.

    The last path component is either already a short hash (returned
    as-is) or an arbitrary folder name, which is hashed.
    """
    repo_dir = path.parts[-1]

    if not RE_SHA1.match(repo_dir):
        return get_hash_from_repository(repo_dir)
    return repo_dir
|  |  | ||||||
|  |  | ||||||
def check_installed(method):
    """Wrap an addon coroutine with an is-installed guard.

    The wrapped coroutine returns False (after logging an error) when
    the addon is not installed; otherwise it awaits *method*.
    """
    # Local import keeps this module's top-level dependencies unchanged.
    import functools

    @functools.wraps(method)
    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)

    return wrap_check
| @@ -1,302 +0,0 @@ | |||||||
| """Validate addons options schema.""" |  | ||||||
| import logging |  | ||||||
| import re |  | ||||||
| import uuid |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP, |  | ||||||
|     ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, |  | ||||||
|     STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE, |  | ||||||
|     BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER, |  | ||||||
|     ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF, |  | ||||||
|     ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED, |  | ||||||
|     ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED, |  | ||||||
|     ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID, |  | ||||||
|     ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC, |  | ||||||
|     ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH, |  | ||||||
|     ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, |  | ||||||
|     ATTR_HOST_DBUS, ATTR_AUTO_UART) |  | ||||||
| from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
# Volume mapping: "<folder>" optionally followed by ":rw" or ":ro".
# Fixed: the pattern previously read (rw|:ro), which rejected the valid
# "config:ro" form (only the bogus "config::ro" would have matched).
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")

# Type tags used in addon option schemas.
V_STR = 'str'
V_INT = 'int'
V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'
V_PORT = 'port'
V_MATCH = 'match'

# One schema element: a bare type tag, int/float with an optional
# (min,max) range, or match(<regex>); a trailing "?" marks it optional.
RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|str|bool|email|url|port"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r")\??$"
)
|  |  | ||||||
# Voluptuous validator for one option-schema element string.
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)

# All CPU architectures an addon may declare support for.
ARCH_ALL = [
    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
]

# Valid addon startup phases, in boot order.
STARTUP_ALL = [
    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
    STARTUP_APPLICATION
]

# Linux capabilities an addon may request for its container.
PRIVILEGED_ALL = [
    "NET_ADMIN",
    "SYS_ADMIN",
    "SYS_RAWIO"
]

# Default Docker base image per architecture (used when build.json
# does not override build_from).
BASE_IMAGE = {
    ARCH_ARMHF: "homeassistant/armhf-base:latest",
    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
    ARCH_I386: "homeassistant/i386-base:latest",
    ARCH_AMD64: "homeassistant/amd64-base:latest",
}
|  |  | ||||||
|  |  | ||||||
| def _simple_startup(value): |  | ||||||
|     """Simple startup schema.""" |  | ||||||
|     if value == "before": |  | ||||||
|         return STARTUP_SERVICES |  | ||||||
|     if value == "after": |  | ||||||
|         return STARTUP_APPLICATION |  | ||||||
|     return value |  | ||||||
|  |  | ||||||
|  |  | ||||||
# Schema for an addon's config.json as shipped in a repository.
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
    vol.Optional(ATTR_URL): vol.Url(),
    # defaults to supporting every architecture
    vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
    # legacy "before"/"after" values are mapped by _simple_startup first
    vol.Required(ATTR_STARTUP):
        vol.All(_simple_startup, vol.In(STARTUP_ALL)),
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): DOCKER_PORTS,
    # webui template, e.g. "http://[HOST]:[PORT:8080]/path"
    vol.Optional(ATTR_WEBUI):
        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
    vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
    # device mappings in docker "<host>:<container>:<permissions>" form
    vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
    # tmpfs mount options, e.g. "size=100m,uid=33,rw"
    vol.Optional(ATTR_TMPFS):
        vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
    vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
    vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
    vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
    vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
    vol.Required(ATTR_OPTIONS): dict,
    # option schema: up to two levels of nesting, or False for "no schema"
    vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
        vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
            vol.Any(
                SCHEMA_ELEMENT,
                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
            ),
        ], vol.Schema({
            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
        }))
    }), False),
    # custom image; {arch} placeholders are allowed
    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
    vol.Optional(ATTR_TIMEOUT, default=10):
        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
}, extra=vol.REMOVE_EXTRA)
|  |  | ||||||
|  |  | ||||||
# Schema for a repository's repository.json metadata file.
# pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_URL): vol.Url(),
    vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
}, extra=vol.REMOVE_EXTRA)


# Schema for an addon's optional build.json file.
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema({
    vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
        vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
    }),
    vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
    vol.Optional(ATTR_ARGS, default={}): vol.Schema({
        vol.Coerce(str): vol.Coerce(str)
    }),
}, extra=vol.REMOVE_EXTRA)


# Per-addon user state (installed version, options, overrides).
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    # a fresh uuid is generated on first validation
    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
        vol.Match(r"^[0-9a-f]{32}$"),
    vol.Optional(ATTR_OPTIONS, default={}): dict,
    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
    vol.Optional(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
}, extra=vol.REMOVE_EXTRA)


# System copy of the addon config, plus where it came from on disk.
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
    vol.Required(ATTR_LOCATON): vol.Coerce(str),
    vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
})


# Layout of the persisted addons data file (FILE_HASSIO_ADDONS).
SCHEMA_ADDON_FILE = vol.Schema({
    vol.Optional(ATTR_USER, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_USER,
    },
    vol.Optional(ATTR_SYSTEM, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
    }
})


# Per-addon metadata stored inside a snapshot archive.
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
    vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
    vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
})
|  |  | ||||||
|  |  | ||||||
def validate_options(raw_schema):
    """Build a voluptuous-compatible validator from an addon's raw schema.

    Returns a callable that validates a user options dict against
    *raw_schema*, dropping unknown keys with a warning.
    """
    def validate(struct):
        """Create schema validator for addons options."""
        validated = {}

        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in raw_schema:
                _LOGGER.warning("Unknown options %s", key)
                continue

            expected = raw_schema[key]
            try:
                if isinstance(expected, dict):
                    # nested value dict
                    validated[key] = _nested_validate_dict(
                        expected, value, key)
                elif isinstance(expected, list):
                    # nested value list
                    validated[key] = _nested_validate_list(
                        expected[0], value, key)
                else:
                    # normal value
                    validated[key] = _single_validate(expected, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(
                    "Type error for {}.".format(key)) from None

        _check_missing_options(raw_schema, validated, 'root')
        return validated

    return validate
|  |  | ||||||
|  |  | ||||||
# pylint: disable=no-value-for-parameter
def _single_validate(typ, value, key):
    """Validate a single element.

    *typ* is a schema element string ("str", "int(1,10)", "match(...)",
    ...); *key* is only used for error messages.  Raises vol.Invalid on
    validation failure.
    """
    # if required argument
    if value is None:
        raise vol.Invalid("Missing required option '{}'.".format(key))

    # parse extend data from type
    # NOTE(review): if *typ* is not a valid schema element, match is None
    # and match.group() below raises AttributeError instead of
    # vol.Invalid -- presumably SCHEMA_ADDON_CONFIG has already
    # validated the schema string; confirm.
    match = RE_SCHEMA_ELEMENT.match(typ)

    # prepare range: map i_min/i_max/f_min/f_max onto vol.Range's
    # min/max keyword arguments
    range_args = {}
    for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
        group_value = match.group(group_name)
        if group_value:
            range_args[group_name[2:]] = float(group_value)

    # startswith() is used so the optional trailing "?" and range
    # suffixes still dispatch to the right branch
    if typ.startswith(V_STR):
        return str(value)
    elif typ.startswith(V_INT):
        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
    elif typ.startswith(V_FLOAT):
        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
    elif typ.startswith(V_BOOL):
        return vol.Boolean()(value)
    elif typ.startswith(V_EMAIL):
        return vol.Email()(value)
    elif typ.startswith(V_URL):
        return vol.Url()(value)
    elif typ.startswith(V_PORT):
        return NETWORK_PORT(value)
    elif typ.startswith(V_MATCH):
        return vol.Match(match.group('match'))(str(value))

    raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
|  |  | ||||||
|  |  | ||||||
def _nested_validate_list(typ, data_list, key):
    """Validate every element of *data_list* against schema *typ*."""
    # The schema does not change per element, so pick the validator once
    # and map it over the whole list.
    if isinstance(typ, dict):
        return [
            _nested_validate_dict(typ, element, key)
            for element in data_list
        ]
    return [_single_validate(typ, element, key) for element in data_list]
|  |  | ||||||
|  |  | ||||||
def _nested_validate_dict(typ, data_dict, key):
    """Validate a dict of nested items against schema dict *typ*.

    Unknown keys are dropped with a warning; missing non-optional keys
    raise ``vol.Invalid`` via ``_check_missing_options``.
    """
    options = {}

    for c_key, c_value in data_dict.items():
        # Ignore unknown options / remove from list
        if c_key not in typ:
            _LOGGER.warning("Unknown options %s", c_key)
            continue

        # Nested?
        if isinstance(typ[c_key], list):
            # nested value list
            options[c_key] = _nested_validate_list(typ[c_key][0],
                                                   c_value, c_key)
        elif isinstance(typ[c_key], dict):
            # nested value dict -- mirrors the root-level validator; without
            # this branch a dict schema would reach _single_validate and
            # crash on RE_SCHEMA_ELEMENT.match(dict)
            options[c_key] = _nested_validate_dict(typ[c_key], c_value, c_key)
        else:
            # normal value
            options[c_key] = _single_validate(typ[c_key], c_value, c_key)

    _check_missing_options(typ, options, key)
    return options
|  |  | ||||||
|  |  | ||||||
def _check_missing_options(origin, exists, root):
    """Raise ``vol.Invalid`` if a required option of *origin* is absent."""
    for miss_opt in set(origin) - set(exists):
        schema_element = origin[miss_opt]
        # A schema string ending in '?' marks the option as optional.
        if isinstance(schema_element, str) and schema_element.endswith("?"):
            continue
        raise vol.Invalid(
            "Missing option {} in {}".format(miss_opt, root))
| @@ -1,177 +0,0 @@ | |||||||
| """Init file for HassIO rest api.""" |  | ||||||
| import logging |  | ||||||
| from pathlib import Path |  | ||||||
|  |  | ||||||
| from aiohttp import web |  | ||||||
|  |  | ||||||
| from .addons import APIAddons |  | ||||||
| from .homeassistant import APIHomeAssistant |  | ||||||
| from .host import APIHost |  | ||||||
| from .network import APINetwork |  | ||||||
| from .supervisor import APISupervisor |  | ||||||
| from .security import APISecurity |  | ||||||
| from .snapshots import APISnapshots |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class RestAPI(object):
    """Handle rest api for hassio.

    Each ``register_*`` method wires one subsystem's routes into the
    aiohttp application; ``start``/``stop`` manage the HTTP server
    lifecycle on port 80.
    """

    def __init__(self, config, loop):
        """Initialize the rest api wrapper."""
        self.config = config
        self.loop = loop
        self.webapp = web.Application(loop=self.loop)

        # service stuff
        # Handler factory and listening server; both are created in
        # start() and torn down in stop().
        self._handler = None
        self.server = None

    def register_host(self, host_control, hardware):
        """Register hostcontrol function."""
        api_host = APIHost(self.config, self.loop, host_control, hardware)

        self.webapp.router.add_get('/host/info', api_host.info)
        self.webapp.router.add_get('/host/hardware', api_host.hardware)
        self.webapp.router.add_post('/host/reboot', api_host.reboot)
        self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
        self.webapp.router.add_post('/host/update', api_host.update)
        self.webapp.router.add_post('/host/options', api_host.options)

    def register_network(self, host_control):
        """Register network function."""
        api_net = APINetwork(self.config, self.loop, host_control)

        self.webapp.router.add_get('/network/info', api_net.info)
        self.webapp.router.add_post('/network/options', api_net.options)

    def register_supervisor(self, supervisor, snapshots, addons, host_control,
                            updater):
        """Register supervisor function."""
        api_supervisor = APISupervisor(
            self.config, self.loop, supervisor, snapshots, addons,
            host_control, updater)

        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
        self.webapp.router.add_post(
            '/supervisor/update', api_supervisor.update)
        self.webapp.router.add_post(
            '/supervisor/reload', api_supervisor.reload)
        self.webapp.router.add_post(
            '/supervisor/options', api_supervisor.options)
        self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)

    def register_homeassistant(self, dock_homeassistant):
        """Register homeassistant function."""
        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)

        self.webapp.router.add_get('/homeassistant/info', api_hass.info)
        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
        self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
        self.webapp.router.add_post('/homeassistant/start', api_hass.start)
        self.webapp.router.add_post('/homeassistant/check', api_hass.check)
        # Transparent proxy to the Home Assistant REST API (GET and POST).
        self.webapp.router.add_post(
            '/homeassistant/api/{path:.+}', api_hass.api)
        self.webapp.router.add_get(
            '/homeassistant/api/{path:.+}', api_hass.api)

    def register_addons(self, addons):
        """Register addon functions."""
        api_addons = APIAddons(self.config, self.loop, addons)

        self.webapp.router.add_get('/addons', api_addons.list)
        self.webapp.router.add_post('/addons/reload', api_addons.reload)

        self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
        self.webapp.router.add_post(
            '/addons/{addon}/install', api_addons.install)
        self.webapp.router.add_post(
            '/addons/{addon}/uninstall', api_addons.uninstall)
        self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
        self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
        self.webapp.router.add_post(
            '/addons/{addon}/restart', api_addons.restart)
        self.webapp.router.add_post(
            '/addons/{addon}/update', api_addons.update)
        self.webapp.router.add_post(
            '/addons/{addon}/options', api_addons.options)
        self.webapp.router.add_post(
            '/addons/{addon}/rebuild', api_addons.rebuild)
        self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
        self.webapp.router.add_get(
            '/addons/{addon}/changelog', api_addons.changelog)
        self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)

    def register_security(self):
        """Register security function."""
        api_security = APISecurity(self.config, self.loop)

        self.webapp.router.add_get('/security/info', api_security.info)
        self.webapp.router.add_post('/security/options', api_security.options)
        self.webapp.router.add_post('/security/totp', api_security.totp)
        self.webapp.router.add_post('/security/session', api_security.session)

    def register_snapshots(self, snapshots):
        """Register snapshots function."""
        api_snapshots = APISnapshots(self.config, self.loop, snapshots)

        self.webapp.router.add_get('/snapshots', api_snapshots.list)
        self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)

        self.webapp.router.add_post(
            '/snapshots/new/full', api_snapshots.snapshot_full)
        self.webapp.router.add_post(
            '/snapshots/new/partial', api_snapshots.snapshot_partial)

        self.webapp.router.add_get(
            '/snapshots/{snapshot}/info', api_snapshots.info)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/remove', api_snapshots.remove)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/restore/partial',
            api_snapshots.restore_partial)

    def register_panel(self):
        """Register panel for homeassistant."""
        def create_panel_response(build_type):
            """Create a function to generate a response."""
            # Resolve the panel file once at registration time; the lambda
            # then serves the same file for every request.
            path = Path(__file__).parents[1].joinpath(
                'panel/hassio-main-{}.html'.format(build_type))

            return lambda request: web.FileResponse(path)

        # This route is for backwards compatibility with HA < 0.58
        self.webapp.router.add_get('/panel', create_panel_response('es5'))
        self.webapp.router.add_get('/panel_es5', create_panel_response('es5'))
        self.webapp.router.add_get(
            '/panel_latest', create_panel_response('latest'))

    async def start(self):
        """Run rest api webserver."""
        self._handler = self.webapp.make_handler(loop=self.loop)

        try:
            # NOTE(review): the port is passed as the string "80"; asyncio
            # accepts it, but an int would be conventional -- confirm.
            self.server = await self.loop.create_server(
                self._handler, "0.0.0.0", "80")
        except OSError as err:
            _LOGGER.fatal(
                "Failed to create HTTP server at 0.0.0.0:80 -> %s", err)

    async def stop(self):
        """Stop rest api webserver."""
        # Close the listening socket first so no new connections arrive,
        # then shut down the application and the handler (60s grace).
        if self.server:
            self.server.close()
            await self.server.wait_closed()
        await self.webapp.shutdown()

        if self._handler:
            await self._handler.shutdown(60)
        await self.webapp.cleanup()
| @@ -1,256 +0,0 @@ | |||||||
| """Init file for HassIO homeassistant rest api.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
| from voluptuous.humanize import humanize_error |  | ||||||
|  |  | ||||||
| from .util import api_process, api_process_raw, api_validate |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS, |  | ||||||
|     ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY, |  | ||||||
|     ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG, |  | ||||||
|     ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER, |  | ||||||
|     ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED, |  | ||||||
|     ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, |  | ||||||
|     ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL, |  | ||||||
|     ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, |  | ||||||
|     CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT) |  | ||||||
| from ..validate import DOCKER_PORTS |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
# Body schema for update requests; version defaults to the latest upstream.
SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})

# Body schema for addon option requests; each key is applied independently.
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
})
|  |  | ||||||
|  |  | ||||||
class APIAddons(object):
    """Handle rest api for addons functions."""

    def __init__(self, config, loop, addons):
        """Initialize addons rest api part."""
        self.config = config
        self.loop = loop
        self.addons = addons

    def _extract_addon(self, request, check_installed=True):
        """Return the addon from the request, raising if it does not exist.

        Raises RuntimeError when the addon slug is unknown or, with
        check_installed, when the addon is not installed.
        """
        addon = self.addons.get(request.match_info.get('addon'))
        if not addon:
            raise RuntimeError("Addon not exists")

        if check_installed and not addon.is_installed:
            raise RuntimeError("Addon is not installed")

        return addon

    @staticmethod
    def _pretty_devices(addon):
        """Return a simplified device list.

        Keeps only the part before the first ':' of each device mapping
        entry; returns None when the addon declares no devices.
        """
        dev_list = addon.devices
        if not dev_list:
            return
        return [row.split(':')[0] for row in dev_list]

    @api_process
    async def list(self, request):
        """Return all addons / repositories ."""
        data_addons = []
        for addon in self.addons.list_addons:
            data_addons.append({
                ATTR_NAME: addon.name,
                ATTR_SLUG: addon.slug,
                ATTR_DESCRIPTON: addon.description,
                ATTR_VERSION: addon.last_version,
                ATTR_INSTALLED: addon.version_installed,
                ATTR_ARCH: addon.supported_arch,
                ATTR_DETACHED: addon.is_detached,
                ATTR_REPOSITORY: addon.repository,
                ATTR_BUILD: addon.need_build,
                ATTR_URL: addon.url,
                ATTR_LOGO: addon.with_logo,
            })

        data_repositories = []
        for repository in self.addons.list_repositories:
            data_repositories.append({
                ATTR_SLUG: repository.slug,
                ATTR_NAME: repository.name,
                ATTR_SOURCE: repository.source,
                ATTR_URL: repository.url,
                ATTR_MAINTAINER: repository.maintainer,
            })

        return {
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: data_repositories,
        }

    @api_process
    async def reload(self, request):
        """Reload all addons data."""
        # shield: protect the reload from being cancelled with the request
        await asyncio.shield(self.addons.reload(), loop=self.loop)
        return True

    @api_process
    async def info(self, request):
        """Return addon information."""
        addon = self._extract_addon(request, check_installed=False)

        return {
            ATTR_NAME: addon.name,
            ATTR_DESCRIPTON: addon.description,
            ATTR_VERSION: addon.version_installed,
            ATTR_AUTO_UPDATE: addon.auto_update,
            ATTR_REPOSITORY: addon.repository,
            ATTR_LAST_VERSION: addon.last_version,
            ATTR_STATE: await addon.state(),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_URL: addon.url,
            ATTR_DETACHED: addon.is_detached,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_HOST_NETWORK: addon.host_network,
            ATTR_HOST_IPC: addon.host_ipc,
            ATTR_HOST_DBUS: addon.host_dbus,
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_DEVICES: self._pretty_devices(addon),
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
            ATTR_WEBUI: addon.webui,
            ATTR_STDIN: addon.with_stdin,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
            ATTR_AUDIO: addon.with_audio,
            ATTR_AUDIO_INPUT: addon.audio_input,
            ATTR_AUDIO_OUTPUT: addon.audio_output,
        }

    @api_process
    async def options(self, request):
        """Store user options for addon."""
        addon = self._extract_addon(request)

        # Extend the static schema with the addon's own options schema so
        # user options are validated against the addon config.
        addon_schema = SCHEMA_OPTIONS.extend({
            vol.Optional(ATTR_OPTIONS): addon.schema,
        })

        body = await api_validate(addon_schema, request)

        # Only keys present in the body are applied.
        if ATTR_OPTIONS in body:
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
            addon.boot = body[ATTR_BOOT]
        if ATTR_AUTO_UPDATE in body:
            addon.auto_update = body[ATTR_AUTO_UPDATE]
        if ATTR_NETWORK in body:
            addon.ports = body[ATTR_NETWORK]
        if ATTR_AUDIO_INPUT in body:
            addon.audio_input = body[ATTR_AUDIO_INPUT]
        if ATTR_AUDIO_OUTPUT in body:
            addon.audio_output = body[ATTR_AUDIO_OUTPUT]

        return True

    @api_process
    def install(self, request):
        """Install addon."""
        addon = self._extract_addon(request, check_installed=False)
        return asyncio.shield(addon.install(), loop=self.loop)

    @api_process
    def uninstall(self, request):
        """Uninstall addon."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall(), loop=self.loop)

    @api_process
    def start(self, request):
        """Start addon."""
        addon = self._extract_addon(request)

        # check options against the addon schema before starting
        options = addon.options
        try:
            addon.schema(options)
        except vol.Invalid as ex:
            raise RuntimeError(humanize_error(options, ex)) from None

        return asyncio.shield(addon.start(), loop=self.loop)

    @api_process
    def stop(self, request):
        """Stop addon."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.stop(), loop=self.loop)

    @api_process
    def update(self, request):
        """Update addon."""
        addon = self._extract_addon(request)

        if addon.last_version == addon.version_installed:
            raise RuntimeError("No update available!")

        return asyncio.shield(addon.update(), loop=self.loop)

    @api_process
    def restart(self, request):
        """Restart addon."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.restart(), loop=self.loop)

    @api_process
    def rebuild(self, request):
        """Rebuild local build addon."""
        addon = self._extract_addon(request)
        if not addon.need_build:
            raise RuntimeError("Only local build addons are supported")

        return asyncio.shield(addon.rebuild(), loop=self.loop)

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return logs from addon."""
        addon = self._extract_addon(request)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request):
        """Return logo from addon."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
            raise RuntimeError("No image found!")

        with addon.path_logo.open('rb') as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def changelog(self, request):
        """Return changelog from addon."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_changelog:
            raise RuntimeError("No changelog found!")

        with addon.path_changelog.open('r') as changelog:
            return changelog.read()

    @api_process
    async def stdin(self, request):
        """Write to stdin of addon."""
        addon = self._extract_addon(request)
        if not addon.with_stdin:
            raise RuntimeError("STDIN not supported by addons")

        # raw request body is forwarded verbatim to the addon's stdin
        data = await request.read()
        return await asyncio.shield(addon.write_stdin(data), loop=self.loop)
| @@ -1,182 +0,0 @@ | |||||||
| """Init file for HassIO homeassistant rest api.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import aiohttp |  | ||||||
| from aiohttp import web |  | ||||||
| from aiohttp.web_exceptions import HTTPBadGateway |  | ||||||
| from aiohttp.hdrs import CONTENT_TYPE |  | ||||||
| import async_timeout |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from .util import api_process, api_process_raw, api_validate |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM, |  | ||||||
|     ATTR_BOOT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, |  | ||||||
|     CONTENT_TYPE_BINARY, HEADER_HA_ACCESS) |  | ||||||
| from ..validate import HASS_DEVICES, NETWORK_PORT |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
# Body schema for Home Assistant option requests.  ATTR_IMAGE and
# ATTR_LAST_VERSION are vol.Inclusive: a custom image must always be set
# together with its version.
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
    vol.Optional(ATTR_BOOT): vol.Boolean(),
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
        vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_PORT): NETWORK_PORT,
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_SSL): vol.Boolean(),
    vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
})

# Body schema for update requests; version defaults to the latest upstream.
SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
|  |  | ||||||
|  |  | ||||||
| class APIHomeAssistant(object): |  | ||||||
|     """Handle rest api for homeassistant functions.""" |  | ||||||
|  |  | ||||||
    def __init__(self, config, loop, homeassistant):
        """Initialize Home Assistant rest api part.

        `homeassistant` is the docker/homeassistant wrapper this API
        reads state from and forwards commands to.
        """
        self.config = config
        self.loop = loop
        self.homeassistant = homeassistant
|  |  | ||||||
|     async def homeassistant_proxy(self, path, request): |  | ||||||
|         """Return a client request with proxy origin for Home-Assistant.""" |  | ||||||
|         url = "{}/api/{}".format(self.homeassistant.api_url, path) |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             data = None |  | ||||||
|             headers = {} |  | ||||||
|             method = getattr( |  | ||||||
|                 self.homeassistant.websession, request.method.lower()) |  | ||||||
|  |  | ||||||
|             # read data |  | ||||||
|             with async_timeout.timeout(10, loop=self.loop): |  | ||||||
|                 data = await request.read() |  | ||||||
|  |  | ||||||
|             if data: |  | ||||||
|                 headers.update({CONTENT_TYPE: request.content_type}) |  | ||||||
|  |  | ||||||
|             # need api password? |  | ||||||
|             if self.homeassistant.api_password: |  | ||||||
|                 headers = {HEADER_HA_ACCESS: self.homeassistant.api_password} |  | ||||||
|  |  | ||||||
|             # reset headers |  | ||||||
|             if not headers: |  | ||||||
|                 headers = None |  | ||||||
|  |  | ||||||
|             client = await method( |  | ||||||
|                 url, data=data, headers=headers, timeout=300 |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             return client |  | ||||||
|  |  | ||||||
|         except aiohttp.ClientError as err: |  | ||||||
|             _LOGGER.error("Client error on api %s request %s.", path, err) |  | ||||||
|  |  | ||||||
|         except asyncio.TimeoutError: |  | ||||||
|             _LOGGER.error("Client timeout error on api request %s.", path) |  | ||||||
|  |  | ||||||
|         raise HTTPBadGateway() |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     async def info(self, request): |  | ||||||
|         """Return host information.""" |  | ||||||
|         return { |  | ||||||
|             ATTR_VERSION: self.homeassistant.version, |  | ||||||
|             ATTR_LAST_VERSION: self.homeassistant.last_version, |  | ||||||
|             ATTR_IMAGE: self.homeassistant.image, |  | ||||||
|             ATTR_DEVICES: self.homeassistant.devices, |  | ||||||
|             ATTR_CUSTOM: self.homeassistant.is_custom_image, |  | ||||||
|             ATTR_BOOT: self.homeassistant.boot, |  | ||||||
|             ATTR_PORT: self.homeassistant.api_port, |  | ||||||
|             ATTR_SSL: self.homeassistant.api_ssl, |  | ||||||
|             ATTR_WATCHDOG: self.homeassistant.watchdog, |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     async def options(self, request): |  | ||||||
|         """Set homeassistant options.""" |  | ||||||
|         body = await api_validate(SCHEMA_OPTIONS, request) |  | ||||||
|  |  | ||||||
|         if ATTR_DEVICES in body: |  | ||||||
|             self.homeassistant.devices = body[ATTR_DEVICES] |  | ||||||
|  |  | ||||||
|         if ATTR_IMAGE in body: |  | ||||||
|             self.homeassistant.set_custom( |  | ||||||
|                 body[ATTR_IMAGE], body[ATTR_LAST_VERSION]) |  | ||||||
|  |  | ||||||
|         if ATTR_BOOT in body: |  | ||||||
|             self.homeassistant.boot = body[ATTR_BOOT] |  | ||||||
|  |  | ||||||
|         if ATTR_PORT in body: |  | ||||||
|             self.homeassistant.api_port = body[ATTR_PORT] |  | ||||||
|  |  | ||||||
|         if ATTR_PASSWORD in body: |  | ||||||
|             self.homeassistant.api_password = body[ATTR_PASSWORD] |  | ||||||
|  |  | ||||||
|         if ATTR_SSL in body: |  | ||||||
|             self.homeassistant.api_ssl = body[ATTR_SSL] |  | ||||||
|  |  | ||||||
|         if ATTR_WATCHDOG in body: |  | ||||||
|             self.homeassistant.watchdog = body[ATTR_WATCHDOG] |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     async def update(self, request): |  | ||||||
|         """Update homeassistant.""" |  | ||||||
|         body = await api_validate(SCHEMA_VERSION, request) |  | ||||||
|         version = body.get(ATTR_VERSION, self.homeassistant.last_version) |  | ||||||
|  |  | ||||||
|         if version == self.homeassistant.version: |  | ||||||
|             raise RuntimeError("Version {} is already in use".format(version)) |  | ||||||
|  |  | ||||||
|         return await asyncio.shield( |  | ||||||
|             self.homeassistant.update(version), loop=self.loop) |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     def stop(self, request): |  | ||||||
|         """Stop homeassistant.""" |  | ||||||
|         return asyncio.shield(self.homeassistant.stop(), loop=self.loop) |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     def start(self, request): |  | ||||||
|         """Start homeassistant.""" |  | ||||||
|         return asyncio.shield(self.homeassistant.run(), loop=self.loop) |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     def restart(self, request): |  | ||||||
|         """Restart homeassistant.""" |  | ||||||
|         return asyncio.shield(self.homeassistant.restart(), loop=self.loop) |  | ||||||
|  |  | ||||||
|     @api_process_raw(CONTENT_TYPE_BINARY) |  | ||||||
|     def logs(self, request): |  | ||||||
|         """Return homeassistant docker logs.""" |  | ||||||
|         return self.homeassistant.logs() |  | ||||||
|  |  | ||||||
|     @api_process |  | ||||||
|     async def check(self, request): |  | ||||||
|         """Check config of homeassistant.""" |  | ||||||
|         code, message = await self.homeassistant.check_config() |  | ||||||
|         if not code: |  | ||||||
|             raise RuntimeError(message) |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     async def api(self, request): |  | ||||||
|         """Proxy API request to Home-Assistant.""" |  | ||||||
|         path = request.match_info.get('path') |  | ||||||
|  |  | ||||||
|         client = await self.homeassistant_proxy(path, request) |  | ||||||
|         return web.Response( |  | ||||||
|             body=await client.read(), |  | ||||||
|             status=client.status, |  | ||||||
|             content_type=client.content_type |  | ||||||
|         ) |  | ||||||
| @@ -1,91 +0,0 @@ | |||||||
| """Init file for HassIO host rest api.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from .util import api_process_hostcontrol, api_process, api_validate |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES, |  | ||||||
|     ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT, |  | ||||||
|     ATTR_AUDIO_OUTPUT, ATTR_GPIO) |  | ||||||
| from ..validate import ALSA_CHANNEL |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| SCHEMA_VERSION = vol.Schema({ |  | ||||||
|     vol.Optional(ATTR_VERSION): vol.Coerce(str), |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| SCHEMA_OPTIONS = vol.Schema({ |  | ||||||
|     vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL, |  | ||||||
|     vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL, |  | ||||||
| }) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class APIHost(object):
    """Handle rest api for host functions."""

    def __init__(self, config, loop, host_control, hardware):
        """Initialize host rest api part."""
        self.config = config
        self.loop = loop
        self.host_control = host_control
        self.local_hw = hardware

    @api_process
    async def info(self, request):
        """Return host information."""
        host = self.host_control
        return {
            ATTR_TYPE: host.type,
            ATTR_VERSION: host.version,
            ATTR_LAST_VERSION: host.last_version,
            ATTR_FEATURES: host.features,
            ATTR_HOSTNAME: host.hostname,
            ATTR_OS: host.os_info,
        }

    @api_process
    async def options(self, request):
        """Process host options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        # Only settings present in the request body are changed.
        if ATTR_AUDIO_OUTPUT in body:
            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_AUDIO_INPUT in body:
            self.config.audio_input = body[ATTR_AUDIO_INPUT]

        return True

    @api_process_hostcontrol
    def reboot(self, request):
        """Reboot host."""
        return self.host_control.reboot()

    @api_process_hostcontrol
    def shutdown(self, request):
        """Poweroff host."""
        return self.host_control.shutdown()

    @api_process_hostcontrol
    async def update(self, request):
        """Update host OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        # Default to the newest known host OS version.
        wanted = body.get(ATTR_VERSION, self.host_control.last_version)

        if wanted == self.host_control.version:
            raise RuntimeError("Version {} is already in use".format(wanted))

        # Shield so a dropped request can't abort the OS update midway.
        return await asyncio.shield(
            self.host_control.update(version=wanted), loop=self.loop)

    @api_process
    async def hardware(self, request):
        """Return local hardware infos."""
        hardware = self.local_hw
        return {
            ATTR_SERIAL: list(hardware.serial_devices),
            ATTR_INPUT: list(hardware.input_devices),
            ATTR_DISK: list(hardware.disk_devices),
            ATTR_GPIO: list(hardware.gpio_devices),
            ATTR_AUDIO: hardware.audio_devices,
        }
| @@ -1,43 +0,0 @@ | |||||||
| """Init file for HassIO network rest api.""" |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from .util import api_process, api_process_hostcontrol, api_validate |  | ||||||
| from ..const import ATTR_HOSTNAME |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| SCHEMA_OPTIONS = vol.Schema({ |  | ||||||
|     vol.Optional(ATTR_HOSTNAME): vol.Coerce(str), |  | ||||||
| }) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class APINetwork(object):
    """Handle rest api for network functions."""

    def __init__(self, config, loop, host_control):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.host_control = host_control

    @api_process
    async def info(self, request):
        """Show network settings."""
        return {ATTR_HOSTNAME: self.host_control.hostname}

    @api_process_hostcontrol
    async def options(self, request):
        """Edit network settings."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        # Push the hostname to the host only when it actually changes.
        if ATTR_HOSTNAME in body:
            hostname = body[ATTR_HOSTNAME]
            if hostname != self.host_control.hostname:
                await self.host_control.set_hostname(hostname)

        return True
| @@ -1,102 +0,0 @@ | |||||||
| """Init file for HassIO security rest api.""" |  | ||||||
| from datetime import datetime, timedelta |  | ||||||
| import io |  | ||||||
| import logging |  | ||||||
| import hashlib |  | ||||||
| import os |  | ||||||
|  |  | ||||||
| from aiohttp import web |  | ||||||
| import voluptuous as vol |  | ||||||
| import pyotp |  | ||||||
| import pyqrcode |  | ||||||
|  |  | ||||||
| from .util import api_process, api_validate, hash_password |  | ||||||
| from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| SCHEMA_PASSWORD = vol.Schema({ |  | ||||||
|     vol.Required(ATTR_PASSWORD): vol.Coerce(str), |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| SCHEMA_SESSION = SCHEMA_PASSWORD.extend({ |  | ||||||
|     vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str), |  | ||||||
| }) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class APISecurity(object):
    """Handle rest api for security functions."""

    def __init__(self, config, loop):
        """Initialize security rest api part."""
        self.config = config
        self.loop = loop

    def _check_password(self, body):
        """Check if password is valid and security is initialize.

        Raises RuntimeError when no password was set yet or when the
        supplied password does not match the stored hash.
        """
        import hmac  # stdlib; local import keeps module imports untouched

        if not self.config.security_initialize:
            raise RuntimeError("First set a password")

        password = hash_password(body[ATTR_PASSWORD])
        # compare_digest instead of != avoids a timing side channel on
        # the secret comparison.
        if not hmac.compare_digest(password, self.config.security_password):
            raise RuntimeError("Wrong password")

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_INITIALIZE: self.config.security_initialize,
            # Expose only whether TOTP is configured, never the secret.
            ATTR_TOTP: self.config.security_totp is not None,
        }

    @api_process
    async def options(self, request):
        """Set options / password."""
        body = await api_validate(SCHEMA_PASSWORD, request)

        # The initial password may be set exactly once via this endpoint.
        if self.config.security_initialize:
            raise RuntimeError("Password is already set!")

        self.config.security_password = hash_password(body[ATTR_PASSWORD])
        self.config.security_initialize = True
        return True

    @api_process
    async def totp(self, request):
        """Set and initialze TOTP.

        Requires the valid password in the body; answers with a QR code
        (SVG) carrying the provisioning URI for authenticator apps.
        """
        body = await api_validate(SCHEMA_PASSWORD, request)
        self._check_password(body)

        # generate TOTP
        totp_init_key = pyotp.random_base32()
        totp = pyotp.TOTP(totp_init_key)

        # init qrcode
        buff = io.BytesIO()

        qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
        qrcode.svg(buff)

        # finish
        self.config.security_totp = totp_init_key
        return web.Response(body=buff.getvalue(), content_type='image/svg+xml')

    @api_process
    async def session(self, request):
        """Set and initialze session."""
        import hmac  # stdlib; local import keeps module imports untouched

        body = await api_validate(SCHEMA_SESSION, request)
        self._check_password(body)

        # check TOTP
        if self.config.security_totp:
            totp = pyotp.TOTP(self.config.security_totp)
            # Token may be None (schema default) — normalize to '' so the
            # constant-time comparison fails cleanly instead of raising.
            token = body[ATTR_TOTP] or ''
            if not hmac.compare_digest(token, totp.now()):
                raise RuntimeError("Invalid TOTP token!")

        # create session
        valid_until = datetime.now() + timedelta(days=1)
        session = hashlib.sha256(os.urandom(54)).hexdigest()

        # store session
        self.config.add_security_session(session, valid_until)
        return {ATTR_SESSION: session}
| @@ -1,135 +0,0 @@ | |||||||
| """Init file for HassIO snapshot rest api.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from .util import api_process, api_validate |  | ||||||
| from ..snapshots.validate import ALL_FOLDERS |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES, |  | ||||||
|     ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE, |  | ||||||
|     ATTR_DEVICES, ATTR_SNAPSHOTS) |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # pylint: disable=no-value-for-parameter |  | ||||||
| SCHEMA_RESTORE_PARTIAL = vol.Schema({ |  | ||||||
|     vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), |  | ||||||
|     vol.Optional(ATTR_ADDONS): [vol.Coerce(str)], |  | ||||||
|     vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)], |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| SCHEMA_SNAPSHOT_FULL = vol.Schema({ |  | ||||||
|     vol.Optional(ATTR_NAME): vol.Coerce(str), |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({ |  | ||||||
|     vol.Optional(ATTR_ADDONS): [vol.Coerce(str)], |  | ||||||
|     vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)], |  | ||||||
| }) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class APISnapshots(object):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots

    def _extract_snapshot(self, request):
        """Return the snapshot from the url or raise when unknown."""
        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
        if snapshot:
            return snapshot
        raise RuntimeError("Snapshot not exists")

    @api_process
    async def list(self, request):
        """Return snapshot list."""
        data_snapshots = [
            {
                ATTR_SLUG: snapshot.slug,
                ATTR_NAME: snapshot.name,
                ATTR_DATE: snapshot.date,
            }
            for snapshot in self.snapshots.list_snapshots
        ]

        return {ATTR_SNAPSHOTS: data_snapshots}

    @api_process
    async def reload(self, request):
        """Reload snapshot list."""
        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
        return True

    @api_process
    async def info(self, request):
        """Return snapshot info."""
        snapshot = self._extract_snapshot(request)

        # Per-addon summary stored inside the snapshot metadata.
        data_addons = [
            {
                ATTR_SLUG: entry[ATTR_SLUG],
                ATTR_NAME: entry[ATTR_NAME],
                ATTR_VERSION: entry[ATTR_VERSION],
            }
            for entry in snapshot.addons
        ]

        return {
            ATTR_SLUG: snapshot.slug,
            ATTR_TYPE: snapshot.sys_type,
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: {
                ATTR_VERSION: snapshot.homeassistant_version,
                ATTR_DEVICES: snapshot.homeassistant_devices,
            },
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
        }

    @api_process
    async def snapshot_full(self, request):
        """Full-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        task = self.snapshots.do_snapshot_full(**body)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    async def snapshot_partial(self, request):
        """Partial-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        task = self.snapshots.do_snapshot_partial(**body)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    def restore_full(self, request):
        """Full-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        task = self.snapshots.do_restore_full(snapshot)
        return asyncio.shield(task, loop=self.loop)

    @api_process
    async def restore_partial(self, request):
        """Partial-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)

        task = self.snapshots.do_restore_partial(snapshot, **body)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        return self.snapshots.remove(self._extract_snapshot(request))
| @@ -1,125 +0,0 @@ | |||||||
| """Init file for HassIO supervisor rest api.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from .util import api_process, api_process_raw, api_validate |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH, |  | ||||||
|     HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY, |  | ||||||
|     ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE, |  | ||||||
|     ATTR_STATE, CONTENT_TYPE_BINARY) |  | ||||||
| from ..validate import validate_timezone |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| SCHEMA_OPTIONS = vol.Schema({ |  | ||||||
|     # pylint: disable=no-value-for-parameter |  | ||||||
|     vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(), |  | ||||||
|     vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()], |  | ||||||
|     vol.Optional(ATTR_TIMEZONE): validate_timezone, |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| SCHEMA_VERSION = vol.Schema({ |  | ||||||
|     vol.Optional(ATTR_VERSION): vol.Coerce(str), |  | ||||||
| }) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class APISupervisor(object):
    """Handle rest api for supervisor functions."""

    def __init__(self, config, loop, supervisor, snapshots, addons,
                 host_control, updater):
        """Initialize supervisor rest api part."""
        self.config = config
        self.loop = loop
        self.supervisor = supervisor
        self.addons = addons
        self.snapshots = snapshots
        self.host_control = host_control
        self.updater = updater

    @api_process
    async def ping(self, request):
        """Return ok for signal that the api is ready."""
        return True

    @api_process
    async def info(self, request):
        """Return host information."""
        # Summarize only the installed addons for the overview payload.
        list_addons = []
        for addon in self.addons.list_addons:
            if addon.is_installed:
                list_addons.append({
                    ATTR_NAME: addon.name,
                    ATTR_SLUG: addon.slug,
                    ATTR_DESCRIPTON: addon.description,
                    ATTR_STATE: await addon.state(),
                    ATTR_VERSION: addon.last_version,
                    ATTR_INSTALLED: addon.version_installed,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_LOGO: addon.with_logo,
                })

        return {
            ATTR_VERSION: HASSIO_VERSION,
            ATTR_LAST_VERSION: self.updater.version_hassio,
            ATTR_BETA_CHANNEL: self.updater.beta_channel,
            ATTR_ARCH: self.config.arch,
            ATTR_TIMEZONE: self.config.timezone,
            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
        }

    @api_process
    async def options(self, request):
        """Set supervisor options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_BETA_CHANNEL in body:
            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]

        if ATTR_TIMEZONE in body:
            self.config.timezone = body[ATTR_TIMEZONE]

        if ATTR_ADDONS_REPOSITORIES in body:
            new = set(body[ATTR_ADDONS_REPOSITORIES])
            # Pass loop=self.loop for consistency with every other
            # asyncio.shield call in this api (it was missing here).
            await asyncio.shield(
                self.addons.load_repositories(new), loop=self.loop)

        return True

    @api_process
    async def update(self, request):
        """Update supervisor OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        # Default to the newest published supervisor version.
        version = body.get(ATTR_VERSION, self.updater.version_hassio)

        if version == self.supervisor.version:
            raise RuntimeError("Version {} is already in use".format(version))

        # Shield so a dropped request can't cancel the self-update.
        return await asyncio.shield(
            self.supervisor.update(version), loop=self.loop)

    @api_process
    async def reload(self, request):
        """Reload addons, config ect."""
        tasks = [
            self.addons.reload(),
            self.snapshots.reload(),
            self.updater.fetch_data(),
            self.host_control.load()
        ]
        results, _ = await asyncio.shield(
            asyncio.wait(tasks, loop=self.loop), loop=self.loop)

        # Any single failed subtask turns the whole reload into an error.
        for result in results:
            if result.exception() is not None:
                raise RuntimeError("Some reload task fails!")

        return True

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return supervisor docker logs."""
        return self.supervisor.logs()
| @@ -1,120 +0,0 @@ | |||||||
| """Init file for HassIO util for rest api.""" |  | ||||||
| import json |  | ||||||
| import hashlib |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| from aiohttp import web |  | ||||||
| from aiohttp.web_exceptions import HTTPServiceUnavailable |  | ||||||
| import voluptuous as vol |  | ||||||
| from voluptuous.humanize import humanize_error |  | ||||||
|  |  | ||||||
| from ..const import ( |  | ||||||
|     JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR, |  | ||||||
|     CONTENT_TYPE_BINARY) |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def json_loads(data):
    """Extract json from string with support for '' and None.

    Falsy input ('' or None, e.g. an empty request body) yields an
    empty dict; malformed json raises RuntimeError so api callers only
    have one error type to handle.
    """
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        # Suppress the json traceback; RuntimeError is this api's error
        # type (same `from None` convention as api_validate below).
        raise RuntimeError("Invalid json") from None
|  |  | ||||||
|  |  | ||||||
def api_process(method):
    """Wrap function with true/false calls to rest api."""
    async def wrap_api(api, *args, **kwargs):
        """Return api information."""
        try:
            answer = await method(api, *args, **kwargs)
        except RuntimeError as err:
            # RuntimeError is the api-level error type; report as 400.
            return api_return_error(message=str(err))

        # Pass prepared responses through untouched; wrap everything
        # else based on its shape / truthiness.
        if isinstance(answer, web.Response):
            return answer
        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if answer:
            return api_return_ok()
        return api_return_error()

    return wrap_api
|  |  | ||||||
|  |  | ||||||
def api_process_hostcontrol(method):
    """Wrap HostControl calls to rest api."""
    async def wrap_hostcontrol(api, *args, **kwargs):
        """Return host information."""
        # Without an active host-control connection the call can't work.
        if not api.host_control.active:
            raise HTTPServiceUnavailable()

        try:
            answer = await method(api, *args, **kwargs)
        except RuntimeError as err:
            return api_return_error(message=str(err))

        # None signals that the backend lacks support for this call.
        if answer is None:
            return api_return_error("Function is not supported")
        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if answer:
            return api_return_ok()
        return api_return_error()

    return wrap_hostcontrol
|  |  | ||||||
|  |  | ||||||
def api_process_raw(content):
    """Wrap content_type into function."""
    def wrap_method(method):
        """Wrap function with raw output to rest api."""
        async def wrap_api(api, *args, **kwargs):
            """Return api information."""
            try:
                # Success: raw payload with the configured content type.
                msg_type = content
                msg_data = await method(api, *args, **kwargs)
            except RuntimeError as err:
                # Failure: the error text as a binary payload.
                msg_type = CONTENT_TYPE_BINARY
                msg_data = str(err).encode()

            return web.Response(body=msg_data, content_type=msg_type)

        return wrap_api
    return wrap_method
|  |  | ||||||
|  |  | ||||||
def api_return_error(message=None):
    """Return a API error message."""
    payload = {
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
    }
    # Errors always answer with HTTP 400.
    return web.json_response(payload, status=400)
|  |  | ||||||
|  |  | ||||||
def api_return_ok(data=None):
    """Return a API ok answer."""
    payload = {
        JSON_RESULT: RESULT_OK,
        JSON_DATA: data or {},
    }
    return web.json_response(payload)
|  |  | ||||||
|  |  | ||||||
async def api_validate(schema, request):
    """Validate request data with schema."""
    data = await request.json(loads=json_loads)
    try:
        return schema(data)
    except vol.Invalid as ex:
        # Re-raise as the api error type with a human readable message;
        # `from None` hides the voluptuous traceback.
        raise RuntimeError(humanize_error(data, ex)) from None
|  |  | ||||||
|  |  | ||||||
def hash_password(password):
    """Hash and salt our passwords.

    NOTE(review): a static salt plus a single unsalted-per-user SHA-256
    round is weak for password storage (no per-user salt, no key
    stretching); kept as-is because stored hashes depend on it.
    """
    salted = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(
        password)
    return hashlib.sha256(salted.encode()).hexdigest()
| @@ -1,144 +0,0 @@ | |||||||
| """Bootstrap HassIO.""" |  | ||||||
| import logging |  | ||||||
| import os |  | ||||||
| import signal |  | ||||||
| import shutil |  | ||||||
| from pathlib import Path |  | ||||||
|  |  | ||||||
| from colorlog import ColoredFormatter |  | ||||||
|  |  | ||||||
| from .const import SOCKET_DOCKER |  | ||||||
| from .config import CoreConfig |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def initialize_system_data():
    """Setup default config and create folders.

    Returns the fresh CoreConfig. Every folder hass.io needs is created
    when missing; the original copy-pasted stanzas are folded into one
    data-driven loop (and mkdir now always creates missing parents,
    which the original did only for some folders).
    """
    config = CoreConfig()

    # (log message, target path) for every required folder; messages are
    # kept identical to the original per-folder log lines.
    required_folders = (
        ("Create Home-Assistant config folder %s", config.path_config),
        ("Create hassio ssl folder %s", config.path_ssl),
        ("Create hassio addon data folder %s", config.path_addons_data),
        ("Create hassio addon local repository folder %s",
         config.path_addons_local),
        ("Create hassio addon git repositories folder %s",
         config.path_addons_git),
        ("Create hassio temp folder %s", config.path_tmp),
        ("Create hassio backup folder %s", config.path_backup),
        ("Create hassio share folder %s", config.path_share),
    )

    for message, path in required_folders:
        if path.is_dir():
            continue
        _LOGGER.info(message, path)
        path.mkdir(parents=True)

    return config
|  |  | ||||||
|  |  | ||||||
def migrate_system_env(config):
    """Cleanup some stuff after update."""
    # hass.io 0.37 -> 0.38: the addons build folder moved; remove the
    # old (now unused) directory if it is still present.
    legacy_build_dir = Path(config.path_hassio, "addons/build")
    if not legacy_build_dir.is_dir():
        return

    try:
        legacy_build_dir.rmdir()
    except OSError:
        _LOGGER.warning("Can't cleanup old addons build dir.")
|  |  | ||||||
|  |  | ||||||
def initialize_logging():
    """Setup the logging."""
    logging.basicConfig(level=logging.INFO)
    base_fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
                "[%(name)s] %(message)s")

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    # Recolor the root handler installed by basicConfig() above.
    root_handler = logging.getLogger().handlers[0]
    root_handler.setFormatter(ColoredFormatter(
        "%(log_color)s{}%(reset)s".format(base_fmt),
        datefmt='%y-%m-%d %H:%M:%S',
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red',
        }
    ))
|  |  | ||||||
|  |  | ||||||
def check_environment():
    """Check that everything the supervisor needs is present.

    Verifies the required environment variables, the docker socket and
    the socat binary. Returns True when everything exists, otherwise
    logs a fatal message and returns False.
    """
    # check environment variables
    for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
                'HOMEASSISTANT_REPOSITORY'):
        if key not in os.environ:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find docker socket!")
        return False

    # check socat exec (fixed typo: was "Can0t")
    if not shutil.which('socat'):
        _LOGGER.fatal("Can't find socat program!")
        return False

    return True
|  |  | ||||||
|  |  | ||||||
def reg_signal(loop):
    """Register SIGTERM, SIGHUP and SIGINT to stop the event loop.

    (The old docstring mentioned SIGKILL, which cannot be caught.)
    Registration can fail e.g. outside the main thread; in that case a
    warning is logged and startup continues without the handler.
    """
    for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
        try:
            loop.add_signal_handler(
                sig, lambda: loop.call_soon(loop.stop))
        except (ValueError, RuntimeError):
            # sig.name renders e.g. "SIGTERM", matching the old messages
            _LOGGER.warning("Could not bind to %s", sig.name)
							
								
								
									
										246
									
								
								hassio/config.py
									
									
									
									
									
								
							
							
						
						
									
										246
									
								
								hassio/config.py
									
									
									
									
									
								
							| @@ -1,246 +0,0 @@ | |||||||
| """Bootstrap HassIO.""" |  | ||||||
| from datetime import datetime |  | ||||||
| import logging |  | ||||||
| import os |  | ||||||
| from pathlib import Path, PurePath |  | ||||||
|  |  | ||||||
| from .const import ( |  | ||||||
|     FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS, |  | ||||||
|     ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST, |  | ||||||
|     ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT) |  | ||||||
| from .tools import JsonConfig, parse_datetime |  | ||||||
| from .validate import SCHEMA_HASSIO_CONFIG |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
# Locations of the managed folders, relative to the hass.io data dir.
HOMEASSISTANT_CONFIG = PurePath("homeassistant")

HASSIO_SSL = PurePath("ssl")

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")

# Fallback for last_boot when no value was stored yet (epoch, ISO format).
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
|  |  | ||||||
|  |  | ||||||
class CoreConfig(JsonConfig):
    """Hold all core config data.

    Persistence comes from JsonConfig: every setter writes the value into
    the backing ``_data`` dict and calls ``save()`` immediately.
    """

    def __init__(self):
        """Initialize config object."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
        # Runtime-only attribute; set from the supervisor container at
        # startup (see HassIO.setup), never persisted to disk.
        self.arch = None

    @property
    def timezone(self):
        """Return system timezone."""
        return self._data[ATTR_TIMEZONE]

    @timezone.setter
    def timezone(self, value):
        """Set system timezone and persist it."""
        self._data[ATTR_TIMEZONE] = value
        self.save()

    @property
    def last_boot(self):
        """Return last boot datetime.

        Falls back to the epoch when nothing was stored yet, and to
        ``utcfromtimestamp(1)`` when the stored value cannot be parsed.
        """
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime (stored as ISO string) and persist it."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()
        self.save()

    @property
    def path_hassio(self):
        """Return hassio data path inside the supervisor container."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return hassio data path extern for docker (host side)."""
        return PurePath(os.environ['SUPERVISOR_SHARE'])

    @property
    def path_extern_config(self):
        """Return config path extern for docker (as str)."""
        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

    @property
    def path_config(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
        """Return SSL path extern for docker (as str)."""
        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core addons."""
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for git addons."""
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom (local) addons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for custom (local) addons extern for docker."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root addon data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root addon data folder extern for docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder extern for docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder extern for docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def addons_repositories(self):
        """Return list of addons custom repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
        """Add a custom repository to the list (no-op if present)."""
        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
        self.save()

    def drop_addon_repository(self, repo):
        """Remove a custom repository from the list (no-op if absent)."""
        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
        self.save()

    @property
    def security_initialize(self):
        """Return True if security was initialized."""
        return self._data[ATTR_SECURITY]

    @security_initialize.setter
    def security_initialize(self, value):
        """Set whether security is initialized."""
        self._data[ATTR_SECURITY] = value
        self.save()

    @property
    def security_totp(self):
        """Return the TOTP key (None when unset)."""
        return self._data.get(ATTR_TOTP)

    @security_totp.setter
    def security_totp(self, value):
        """Set the TOTP key."""
        self._data[ATTR_TOTP] = value
        self.save()

    @property
    def security_password(self):
        """Return the password key (None when unset)."""
        return self._data.get(ATTR_PASSWORD)

    @security_password.setter
    def security_password(self, value):
        """Set the password key."""
        self._data[ATTR_PASSWORD] = value
        self.save()

    @property
    def security_sessions(self):
        """Return api sessions as {session: expiry datetime}."""
        return {
            session: parse_datetime(until) for
            session, until in self._data[ATTR_SESSIONS].items()
        }

    def add_security_session(self, session, valid):
        """Add a new session valid until the given datetime."""
        self._data[ATTR_SESSIONS].update(
            {session: valid.isoformat()}
        )
        self.save()

    def drop_security_session(self, session):
        """Delete a session (no-op if unknown)."""
        self._data[ATTR_SESSIONS].pop(session, None)
        self.save()

    @property
    def audio_output(self):
        """Return ALSA audio output card,dev (None when unset)."""
        return self._data.get(ATTR_AUDIO_OUTPUT)

    @audio_output.setter
    def audio_output(self, value):
        """Set ALSA audio output card,dev."""
        self._data[ATTR_AUDIO_OUTPUT] = value
        self.save()

    @property
    def audio_input(self):
        """Return ALSA audio input card,dev (None when unset)."""
        return self._data.get(ATTR_AUDIO_INPUT)

    @audio_input.setter
    def audio_input(self, value):
        """Set ALSA audio input card,dev."""
        self._data[ATTR_AUDIO_INPUT] = value
        self.save()
							
								
								
									
										172
									
								
								hassio/const.py
									
									
									
									
									
								
							
							
						
						
									
										172
									
								
								hassio/const.py
									
									
									
									
									
								
							| @@ -1,172 +0,0 @@ | |||||||
| """Const file for HassIO.""" |  | ||||||
| from pathlib import Path |  | ||||||
| from ipaddress import ip_network |  | ||||||
|  |  | ||||||
HASSIO_VERSION = '0.76'

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/{}/version.json')

URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'

HASSIO_DATA = Path("/data")

# Scheduler intervals for periodic tasks (presumably seconds — used as
# the interval argument of Scheduler.register_task).
RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
RUN_WATCHDOG_HOMEASSISTANT_API = 300
RUN_CLEANUP_API_SESSIONS = 900

# Persisted JSON state files inside the hass.io data dir.
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")

DOCKER_NETWORK = 'hassio'
DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')

# Docker image labels used to identify hass.io managed containers.
LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type'

META_ADDON = 'addon'
META_SUPERVISOR = 'supervisor'
META_HOMEASSISTANT = 'homeassistant'

# Keys of the REST API JSON envelope.
JSON_RESULT = 'result'
JSON_DATA = 'data'
JSON_MESSAGE = 'message'

RESULT_ERROR = 'error'
RESULT_OK = 'ok'

CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png'
CONTENT_TYPE_JSON = 'application/json'
CONTENT_TYPE_TEXT = 'text/plain'
HEADER_HA_ACCESS = 'x-ha-access'

# Attribute keys shared between config files and the REST API.
ATTR_WATCHDOG = 'watchdog'
ATTR_CHANGELOG = 'changelog'
ATTR_DATE = 'date'
ATTR_ARCH = 'arch'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_ARGS = 'args'
ATTR_OS = 'os'
ATTR_TYPE = 'type'
ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features'
ATTR_ADDONS = 'addons'
ATTR_VERSION = 'version'
ATTR_AUTO_UART = 'auto_uart'
ATTR_LAST_BOOT = 'last_boot'
ATTR_LAST_VERSION = 'last_version'
ATTR_BETA_CHANNEL = 'beta_channel'
ATTR_NAME = 'name'
ATTR_SLUG = 'slug'
ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_PORT = 'port'
ATTR_SSL = 'ssl'
ATTR_MAP = 'map'
ATTR_WEBUI = 'webui'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_DETACHED = 'detached'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_LOGO = 'logo'
ATTR_STDIN = 'stdin'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
ATTR_REPOSITORY = 'repository'
ATTR_REPOSITORIES = 'repositories'
ATTR_URL = 'url'
ATTR_MAINTAINER = 'maintainer'
ATTR_PASSWORD = 'password'
ATTR_TOTP = 'totp'
ATTR_INITIALIZE = 'initialize'
ATTR_SESSION = 'session'
ATTR_SESSIONS = 'sessions'
ATTR_LOCATON = 'location'
ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network'
ATTR_HOST_IPC = 'host_ipc'
ATTR_HOST_DBUS = 'host_dbus'
ATTR_NETWORK = 'network'
ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged'
ATTR_USER = 'user'
ATTR_SYSTEM = 'system'
ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_HASSIO = 'hassio'
ATTR_HASSIO_API = 'hassio_api'
ATTR_HOMEASSISTANT_API = 'homeassistant_api'
ATTR_UUID = 'uuid'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
# NOTE: a duplicate "ATTR_TYPE = 'type'" definition was removed here;
# it is already defined above with the identical value.
ATTR_TIMEOUT = 'timeout'
ATTR_AUTO_UPDATE = 'auto_update'
ATTR_CUSTOM = 'custom'
ATTR_AUDIO = 'audio'
ATTR_AUDIO_INPUT = 'audio_input'
ATTR_AUDIO_OUTPUT = 'audio_output'
ATTR_INPUT = 'input'
ATTR_OUTPUT = 'output'
ATTR_DISK = 'disk'
ATTR_SERIAL = 'serial'
ATTR_SECURITY = 'security'
ATTR_BUILD_FROM = 'build_from'
ATTR_SQUASH = 'squash'
ATTR_GPIO = 'gpio'
ATTR_LEGACY = 'legacy'
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'

# Addon startup stages.
STARTUP_INITIALIZE = 'initialize'
STARTUP_SYSTEM = 'system'
STARTUP_SERVICES = 'services'
STARTUP_APPLICATION = 'application'
STARTUP_ONCE = 'once'

BOOT_AUTO = 'auto'
BOOT_MANUAL = 'manual'

STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'
STATE_NONE = 'none'

# Folder names addons may request to map into their container.
MAP_CONFIG = 'config'
MAP_SSL = 'ssl'
MAP_ADDONS = 'addons'
MAP_BACKUP = 'backup'
MAP_SHARE = 'share'

ARCH_ARMHF = 'armhf'
ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = 'amd64'
ARCH_I386 = 'i386'

REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'

FOLDER_HOMEASSISTANT = 'homeassistant'
FOLDER_SHARE = 'share'
FOLDER_ADDONS = 'addons/local'
FOLDER_SSL = 'ssl'

SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial'
							
								
								
									
										190
									
								
								hassio/core.py
									
									
									
									
									
								
							
							
						
						
									
										190
									
								
								hassio/core.py
									
									
									
									
									
								
							| @@ -1,190 +0,0 @@ | |||||||
| """Main file for HassIO.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import aiohttp |  | ||||||
|  |  | ||||||
| from .addons import AddonManager |  | ||||||
| from .api import RestAPI |  | ||||||
| from .host_control import HostControl |  | ||||||
| from .const import ( |  | ||||||
|     RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS, |  | ||||||
|     RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT_DOCKER, |  | ||||||
|     RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES, |  | ||||||
|     STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS, |  | ||||||
|     RUN_UPDATE_ADDONS_TASKS) |  | ||||||
| from .hardware import Hardware |  | ||||||
| from .homeassistant import HomeAssistant |  | ||||||
| from .scheduler import Scheduler |  | ||||||
| from .dock import DockerAPI |  | ||||||
| from .dock.supervisor import DockerSupervisor |  | ||||||
| from .dns import DNSForward |  | ||||||
| from .snapshots import SnapshotsManager |  | ||||||
| from .updater import Updater |  | ||||||
| from .tasks import ( |  | ||||||
|     hassio_update, homeassistant_watchdog_docker, api_sessions_cleanup, |  | ||||||
|     addons_update) |  | ||||||
| from .tools import fetch_timezone |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class HassIO(object):
    """Main object of hassio.

    Owns all subsystems (docker, addons, snapshots, Home Assistant,
    host control, scheduler, REST API) and drives the startup/stop
    sequence.
    """

    def __init__(self, loop, config):
        """Initialize hassio object with the event loop and CoreConfig."""
        self.exit_code = 0
        self.loop = loop
        self.config = config
        self.websession = aiohttp.ClientSession(loop=loop)
        self.updater = Updater(config, loop, self.websession)
        self.scheduler = Scheduler(loop)
        self.api = RestAPI(config, loop)
        self.hardware = Hardware()
        self.docker = DockerAPI(self.hardware)
        self.dns = DNSForward(loop)

        # init basic docker container
        self.supervisor = DockerSupervisor(
            config, loop, self.docker, self.stop)

        # init homeassistant
        self.homeassistant = HomeAssistant(
            config, loop, self.docker, self.updater)

        # init HostControl
        self.host_control = HostControl(loop)

        # init addon system
        self.addons = AddonManager(config, loop, self.docker)

        # init snapshot system
        self.snapshots = SnapshotsManager(
            config, loop, self.scheduler, self.addons, self.homeassistant)

    async def setup(self):
        """Setup HassIO orchestration.

        Attaches to the supervisor container, registers all REST API
        views and periodic tasks, and prepares Home Assistant and the
        addons. Order matters: the supervisor must be attached first so
        the running arch is known before addons are prepared.
        """
        # supervisor
        if not await self.supervisor.attach():
            _LOGGER.fatal("Can't setup supervisor docker container!")
        await self.supervisor.cleanup()

        # set running arch
        self.config.arch = self.supervisor.arch

        # update timezone
        if self.config.timezone == 'UTC':
            self.config.timezone = await fetch_timezone(self.websession)

        # hostcontrol
        await self.host_control.load()

        # schedule update info tasks
        self.scheduler.register_task(
            self.host_control.load, RUN_UPDATE_INFO_TASKS)

        # rest api views
        self.api.register_host(self.host_control, self.hardware)
        self.api.register_network(self.host_control)
        self.api.register_supervisor(
            self.supervisor, self.snapshots, self.addons, self.host_control,
            self.updater)
        self.api.register_homeassistant(self.homeassistant)
        self.api.register_addons(self.addons)
        self.api.register_security()
        self.api.register_snapshots(self.snapshots)
        self.api.register_panel()

        # schedule api session cleanup
        self.scheduler.register_task(
            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
            now=True)

        # Load homeassistant
        await self.homeassistant.prepare()

        # Load addons
        await self.addons.prepare()

        # schedule addon update task
        self.scheduler.register_task(
            self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
        self.scheduler.register_task(
            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)

        # schedule self update task
        self.scheduler.register_task(
            hassio_update(self.supervisor, self.updater),
            RUN_UPDATE_SUPERVISOR_TASKS)

        # schedule snapshot update tasks
        self.scheduler.register_task(
            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)

        # start dns forwarding
        self.loop.create_task(self.dns.start())

        # start addon mark as initialize
        await self.addons.auto_boot(STARTUP_INITIALIZE)

    async def start(self):
        """Start HassIO orchestration.

        Runs a self-update check, starts the REST API, then boots the
        addons stage by stage and Home Assistant itself. When the host
        boot time is unchanged (supervisor-only restart), the boot
        sequence is skipped but the watchdog is still registered via
        the finally block.
        """
        # on release channel, try update itself
        # on beta channel, only read new versions
        await asyncio.wait(
            [hassio_update(self.supervisor, self.updater)()],
            loop=self.loop
        )

        # start api
        await self.api.start()
        _LOGGER.info("Start hassio api on %s", self.docker.network.supervisor)

        try:
            # HomeAssistant is already running / supervisor have only reboot
            if self.hardware.last_boot == self.config.last_boot:
                _LOGGER.info("HassIO reboot detected")
                return

            # start addon mark as system
            await self.addons.auto_boot(STARTUP_SYSTEM)

            # start addon mark as services
            await self.addons.auto_boot(STARTUP_SERVICES)

            # run HomeAssistant
            if self.homeassistant.boot:
                await self.homeassistant.run()

            # start addon mark as application
            await self.addons.auto_boot(STARTUP_APPLICATION)

            # store new last boot
            self.config.last_boot = self.hardware.last_boot

        finally:
            # schedule homeassistant watchdog
            self.scheduler.register_task(
                homeassistant_watchdog_docker(self.loop, self.homeassistant),
                RUN_WATCHDOG_HOMEASSISTANT_DOCKER)

            # self.scheduler.register_task(
            #    homeassistant_watchdog_api(self.loop, self.homeassistant),
            #    RUN_WATCHDOG_HOMEASSISTANT_API)

            # If landingpage / run upgrade in background
            if self.homeassistant.version == 'landingpage':
                self.loop.create_task(self.homeassistant.install())

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.scheduler.suspend = True

        # process stop tasks
        # NOTE(review): ClientSession.close() is called synchronously here;
        # in newer aiohttp releases it is a coroutine — confirm the pinned
        # aiohttp version before upgrading.
        self.websession.close()
        self.homeassistant.websession.close()

        # process async stop tasks
        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
| @@ -1,42 +0,0 @@ | |||||||
| """Setup the internal DNS service for host applications.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
| import shlex |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53" |  | ||||||
|  |  | ||||||
|  |  | ||||||
class DNSForward(object):
    """Manage DNS forwarding to internal DNS.

    Spawns a `socat` child process that relays UDP port 53 to the
    embedded docker DNS resolver at 127.0.0.11.
    """

    def __init__(self, loop):
        """Initialize DNS forwarding.

        loop: asyncio event loop used to spawn the subprocess.
        """
        self.loop = loop
        self.proc = None

    async def start(self):
        """Start DNS forwarding by launching the socat relay process."""
        devnull = asyncio.subprocess.DEVNULL
        try:
            self.proc = await asyncio.create_subprocess_exec(
                *shlex.split(COMMAND),
                stdin=devnull,
                stdout=devnull,
                stderr=devnull,
                loop=self.loop
            )
        except OSError as err:
            # socat missing or not executable -- forwarding stays disabled
            _LOGGER.error("Can't start DNS forwarding -> %s", err)
            return
        _LOGGER.info("Start DNS port forwarding for host add-ons")

    async def stop(self):
        """Stop DNS forwarding and reap the child process."""
        if self.proc is None:
            _LOGGER.warning("DNS forwarding is not running!")
            return

        self.proc.kill()
        # wait() reaps the child so no zombie process is left behind
        await self.proc.wait()
        _LOGGER.info("Stop DNS forwarding")
| @@ -1,109 +0,0 @@ | |||||||
| """Init file for HassIO docker object.""" |  | ||||||
| from contextlib import suppress |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import docker |  | ||||||
|  |  | ||||||
| from .network import DockerNetwork |  | ||||||
| from ..const import SOCKET_DOCKER |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class DockerAPI(object):
    """Docker hassio wrapper.

    Thin wrapper around docker-py that owns the client, the hassio
    network and the hardware info object.

    This class is not AsyncIO safe!
    """

    def __init__(self, hardware):
        """Initialize docker base wrapper.

        hardware: hardware info object (exposes serial devices, etc.).
        """
        # NOTE(review): "unix:/{}" with an absolute socket path produces
        # "unix://..." -- relies on SOCKET_DOCKER being absolute; confirm.
        self.docker = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
        self.network = DockerNetwork(self.docker)
        self.hardware = hardware

    @property
    def images(self):
        """Return api images."""
        return self.docker.images

    @property
    def containers(self):
        """Return api containers."""
        return self.docker.containers

    @property
    def api(self):
        """Return low-level docker api client."""
        return self.docker.api

    def run(self, image, **kwargs):
        """Create a docker container and run it.

        Returns True on success, False otherwise.
        Need run inside executor.
        """
        name = kwargs.get('name', image)
        network_mode = kwargs.get('network_mode')
        hostname = kwargs.get('hostname')

        # setup network: host-networked containers get the supervisor as DNS,
        # all others are attached to the hassio network after creation
        if network_mode:
            kwargs['dns'] = [str(self.network.supervisor)]
        else:
            kwargs['network'] = None

        # create container
        try:
            container = self.docker.containers.create(image, **kwargs)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s -> %s", name, err)
            return False

        # attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            if self.network.attach_container(container, alias=alias):
                self.network.detach_default_bridge(container)
            else:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)

        # run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s -> %s", name, err)
            return False

        return True

    def run_command(self, image, command=None, **kwargs):
        """Create a temporary container and run command.

        Returns a (exit_code, output) tuple; (None, b"") on failure.
        Need run inside executor.
        """
        stdout = kwargs.get('stdout', True)
        stderr = kwargs.get('stderr', True)

        _LOGGER.info("Run command '%s' on %s", command, image)
        try:
            container = self.docker.containers.run(
                image,
                command=command,
                network=self.network.name,
                **kwargs
            )

            # wait until command is done
            # NOTE(review): container.wait() returns an int in docker-py 2.x
            # but a dict in 3.x -- confirm the pinned docker-py version.
            exit_code = container.wait()
            output = container.logs(stdout=stdout, stderr=stderr)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command -> %s", err)
            return (None, b"")

        # cleanup container; best-effort, failure to remove is non-fatal
        with suppress(docker.errors.DockerException):
            container.remove(force=True)

        return (exit_code, output)
| @@ -1,364 +0,0 @@ | |||||||
| """Init file for HassIO addon docker object.""" |  | ||||||
| import logging |  | ||||||
| import os |  | ||||||
|  |  | ||||||
| import docker |  | ||||||
| import requests |  | ||||||
|  |  | ||||||
| from .interface import DockerInterface |  | ||||||
| from .util import docker_process |  | ||||||
| from ..addons.build import AddonBuild |  | ||||||
| from ..const import ( |  | ||||||
|     MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE) |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm" |  | ||||||
|  |  | ||||||
|  |  | ||||||
class DockerAddon(DockerInterface):
    """Docker hassio wrapper for a Home Assistant add-on container."""

    def __init__(self, config, loop, api, addon):
        """Initialize docker add-on wrapper.

        addon: the add-on model object this container belongs to.
        """
        super().__init__(
            config, loop, api, image=addon.image, timeout=addon.timeout)
        self.addon = addon

    def process_metadata(self, metadata, force=False):
        """Use add-on data instead of image metadata for legacy add-ons."""
        if not self.addon.legacy:
            return super().process_metadata(metadata, force=force)

        # Legacy images carry no version/arch labels -> use add-on data
        if not self.version or force:
            if force:  # called on install/update/build
                self.version = self.addon.last_version
            else:
                self.version = self.addon.version_installed

        if not self.arch:
            self.arch = self.config.arch

    @property
    def name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addon.slug)

    @property
    def ipc(self):
        """Return the IPC namespace ('host' or None)."""
        if self.addon.host_ipc:
            return 'host'
        return None

    @property
    def hostname(self):
        """Return slug of add-on mangled into a valid hostname."""
        return self.addon.slug.replace('_', '-')

    @property
    def environment(self):
        """Return environment for docker add-on."""
        # Copy: never mutate the add-on's own environment mapping.
        # The original updated self.addon.environment in place.
        addon_env = dict(self.addon.environment or {})
        if self.addon.with_audio:
            addon_env.update({
                'ALSA_OUTPUT': self.addon.audio_output,
                'ALSA_INPUT': self.addon.audio_input,
            })

        # Set api token if any API access is needed
        if self.addon.access_hassio_api or self.addon.access_homeassistant_api:
            addon_env['API_TOKEN'] = self.addon.api_token

        return {
            **addon_env,
            'TZ': self.config.timezone,
        }

    @property
    def devices(self):
        """Return needed devices or None if there are none."""
        # Copy: appending below must not grow self.addon.devices itself.
        # The original aliased the add-on list, so UART mappings were
        # appended again on every property access.
        devices = list(self.addon.devices or [])

        # Use audio devices
        if self.addon.with_audio and AUDIO_DEVICE not in devices:
            devices.append(AUDIO_DEVICE)

        # Auto mapping UART devices
        if self.addon.auto_uart:
            for uart_dev in self.docker.hardware.serial_devices:
                devices.append("{0}:{0}:rwm".format(uart_dev))

        # Return None if no devices is present
        return devices or None

    @property
    def ports(self):
        """Filter None from add-on ports."""
        if not self.addon.ports:
            return None

        return {
            container_port: host_port
            for container_port, host_port in self.addon.ports.items()
            if host_port
        }

    @property
    def security_opt(self):
        """Control security options for the container."""
        privileged = self.addon.privileged or []

        # Disable AppArmor since it makes trouble with SYS_ADMIN
        if 'SYS_ADMIN' in privileged:
            return [
                "apparmor:unconfined",
            ]
        return None

    @property
    def tmpfs(self):
        """Return tmpfs mapping for docker add-on or None."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": "{}".format(options)}
        return None

    @property
    def network_mapping(self):
        """Return extra /etc/hosts mappings for the container."""
        return {
            'homeassistant': self.docker.network.gateway,
            'hassio': self.docker.network.supervisor,
        }

    @property
    def network_mode(self):
        """Return network mode for add-on ('host' or None)."""
        if self.addon.host_network:
            return 'host'
        return None

    @property
    def volumes(self):
        """Generate volumes for mappings based on the add-on config."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': "/data", 'mode': 'rw'
            }}

        addon_mapping = self.addon.map_volumes

        # setup config mappings
        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.config.path_extern_config): {
                    'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
                }})

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.config.path_extern_ssl): {
                    'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
                }})

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.config.path_extern_addons_local): {
                    'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
                }})

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.config.path_extern_backup): {
                    'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
                }})

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.config.path_extern_share): {
                    'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
                }})

        # init other hardware mappings (GPIO sysfs trees)
        if self.addon.with_gpio:
            volumes.update({
                "/sys/class/gpio": {
                    'bind': "/sys/class/gpio", 'mode': 'rw'
                },
                "/sys/devices/platform/soc": {
                    'bind': "/sys/devices/platform/soc", 'mode': 'rw'
                },
            })

        # host dbus system
        if self.addon.host_dbus:
            volumes.update({
                "/var/run/dbus": {
                    'bind': "/var/run/dbus", 'mode': 'rw'
                }})

        return volumes

    def _run(self):
        """Run docker image.

        Returns True on success, False otherwise.
        Need run inside executor.
        """
        if self._is_running():
            return True

        # cleanup a stale container before starting a fresh one
        self._stop()

        # write config
        if not self.addon.write_options():
            return False

        ret = self.docker.run(
            self.image,
            name=self.name,
            hostname=self.hostname,
            detach=True,
            init=True,
            ipc_mode=self.ipc,
            stdin_open=self.addon.with_stdin,
            network_mode=self.network_mode,
            ports=self.ports,
            extra_hosts=self.network_mapping,
            devices=self.devices,
            cap_add=self.addon.privileged,
            security_opt=self.security_opt,
            environment=self.environment,
            volumes=self.volumes,
            tmpfs=self.tmpfs
        )

        if ret:
            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        return ret

    def _install(self, tag):
        """Pull docker image or build it locally.

        Need run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag)

    def _build(self, tag):
        """Build a docker image from the add-on's build environment.

        Returns True on success, False otherwise.
        Need run inside executor.
        """
        build_env = AddonBuild(self.config, self.addon)

        _LOGGER.info("Start build %s:%s", self.image, tag)
        try:
            image = self.docker.images.build(**build_env.get_docker_args(tag))

            image.tag(self.image, tag='latest')
            self.process_metadata(image.attrs, force=True)

        except (docker.errors.DockerException) as err:
            _LOGGER.error("Can't build %s:%s -> %s", self.image, tag, err)
            return False

        _LOGGER.info("Build %s:%s done", self.image, tag)
        return True

    @docker_process
    def export_image(self, path):
        """Export current image into a tar file.

        Returns a Future.
        """
        return self.loop.run_in_executor(None, self._export_image, path)

    def _export_image(self, tar_file):
        """Export current image into a tar file.

        tar_file: pathlib.Path destination.
        Need run inside executor.
        """
        try:
            image = self.docker.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
            return False

        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image.stream():
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        return True

    @docker_process
    def import_image(self, path, tag):
        """Import a tar file as image.

        Returns a Future.
        """
        return self.loop.run_in_executor(None, self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image and tag it.

        Need run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self.docker.api.load_image(read_tar)

            image = self.docker.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s -> %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self.process_metadata(image.attrs, force=True)
        self._cleanup()
        return True

    def _restart(self):
        """Restart docker container by a full stop/run cycle.

        Add-ons prepare some things on start that are normally not
        repeatable, so a plain docker restart is not enough.
        Need run inside executor.
        """
        self._stop()
        return self._run()

    @docker_process
    def write_stdin(self, data):
        """Write to add-on stdin.

        Returns a Future.
        """
        return self.loop.run_in_executor(None, self._write_stdin, data)

    def _write_stdin(self, data):
        """Write bytes to the add-on's stdin stream.

        Need run inside executor.
        """
        if not self._is_running():
            return False

        try:
            # load needed docker objects
            container = self.docker.containers.get(self.name)
            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't attach to %s stdin -> %s", self.name, err)
            return False

        try:
            # write to stdin; newline terminates the command for the add-on
            data += b"\n"
            os.write(socket.fileno(), data)
            socket.close()
        except OSError as err:
            _LOGGER.error("Can't write to %s stdin -> %s", self.name, err)
            return False

        return True
| @@ -1,114 +0,0 @@ | |||||||
| """Init file for HassIO docker object.""" |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import docker |  | ||||||
|  |  | ||||||
| from .interface import DockerInterface |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| HASS_DOCKER_NAME = 'homeassistant' |  | ||||||
|  |  | ||||||
|  |  | ||||||
class DockerHomeAssistant(DockerInterface):
    """Docker hassio wrapper for the HomeAssistant core container."""

    def __init__(self, config, loop, api, data):
        """Initialize docker homeassistant wrapper.

        data: HomeAssistant model object (image, devices, ...).
        """
        super().__init__(config, loop, api, image=data.image)
        self.data = data

    @property
    def name(self):
        """Return name of docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into docker, or None."""
        if not self.data.devices:
            return None

        devices = []
        for device in self.data.devices:
            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))

        return devices

    def _run(self):
        """Run docker image.

        Returns True on success, False otherwise.
        Need run inside executor.
        """
        if self._is_running():
            # Already up: report success (the original fell through with
            # None here, unlike the other DockerInterface subclasses).
            return True

        # cleanup a stale container before starting a fresh one
        self._stop()

        ret = self.docker.run(
            self.image,
            name=self.name,
            hostname=self.name,
            detach=True,
            privileged=True,
            init=True,
            devices=self.devices,
            network_mode='host',
            environment={
                'HASSIO': self.docker.network.supervisor,
                'TZ': self.config.timezone,
            },
            volumes={
                str(self.config.path_extern_config):
                    {'bind': '/config', 'mode': 'rw'},
                str(self.config.path_extern_ssl):
                    {'bind': '/ssl', 'mode': 'ro'},
                str(self.config.path_extern_share):
                    {'bind': '/share', 'mode': 'rw'},
            }
        )

        if ret:
            _LOGGER.info("Start homeassistant %s with version %s",
                         self.image, self.version)

        return ret

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Returns the (exit_code, output) tuple from DockerAPI.run_command.
        Need run inside executor.
        """
        return self.docker.run_command(
            self.image,
            command,
            detach=True,
            stdout=True,
            stderr=True,
            environment={
                'TZ': self.config.timezone,
            },
            volumes={
                str(self.config.path_extern_config):
                    {'bind': '/config', 'mode': 'ro'},
                str(self.config.path_extern_ssl):
                    {'bind': '/ssl', 'mode': 'ro'},
            }
        )

    def is_initialize(self):
        """Return True if docker container exists.

        Returns a Future.
        """
        return self.loop.run_in_executor(None, self._is_initialize)

    def _is_initialize(self):
        """Return True if docker container exists.

        Need run inside executor.
        """
        try:
            self.docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        return True
| @@ -1,327 +0,0 @@ | |||||||
| """Interface class for HassIO docker object.""" |  | ||||||
| import asyncio |  | ||||||
| from contextlib import suppress |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import docker |  | ||||||
|  |  | ||||||
| from .util import docker_process |  | ||||||
| from ..const import LABEL_VERSION, LABEL_ARCH |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DockerInterface(object): |  | ||||||
|     """Docker hassio interface.""" |  | ||||||
|  |  | ||||||
    def __init__(self, config, loop, api, image=None, timeout=30):
        """Initialize docker base wrapper.

        config: global hassio config object
        loop: asyncio event loop used for executor jobs and the lock
        api: DockerAPI wrapper (exposes images/containers/run)
        image: docker image name, or None to resolve it from metadata later
        timeout: seconds used for stop operations
        """
        self.config = config
        self.loop = loop
        self.docker = api

        self.image = image
        self.timeout = timeout
        self.version = None
        self.arch = None
        # Serializes all @docker_process operations on this object.
        # NOTE(review): the explicit loop argument to asyncio.Lock is
        # deprecated/removed in newer Python -- confirm target runtime.
        self._lock = asyncio.Lock(loop=loop)
|  |  | ||||||
    @property
    def name(self):
        """Return name of docker container (None on the base class)."""
        return None
|  |  | ||||||
    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        # The lock is held by every @docker_process-decorated operation
        return self._lock.locked()
|  |  | ||||||
|     def process_metadata(self, metadata, force=False): |  | ||||||
|         """Read metadata and set it to object.""" |  | ||||||
|         # read image |  | ||||||
|         if not self.image: |  | ||||||
|             self.image = metadata['Config']['Image'] |  | ||||||
|  |  | ||||||
|         # read version |  | ||||||
|         need_version = force or not self.version |  | ||||||
|         if need_version and LABEL_VERSION in metadata['Config']['Labels']: |  | ||||||
|             self.version = metadata['Config']['Labels'][LABEL_VERSION] |  | ||||||
|         elif need_version: |  | ||||||
|             _LOGGER.warning("Can't read version from %s", self.name) |  | ||||||
|  |  | ||||||
|         # read arch |  | ||||||
|         need_arch = force or not self.arch |  | ||||||
|         if need_arch and LABEL_ARCH in metadata['Config']['Labels']: |  | ||||||
|             self.arch = metadata['Config']['Labels'][LABEL_ARCH] |  | ||||||
|  |  | ||||||
    @docker_process
    def install(self, tag):
        """Pull docker image.

        Returns a Future running self._install in the default executor.
        """
        return self.loop.run_in_executor(None, self._install, tag)
|  |  | ||||||
|     def _install(self, tag): |  | ||||||
|         """Pull docker image. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             _LOGGER.info("Pull image %s tag %s.", self.image, tag) |  | ||||||
|             image = self.docker.images.pull("{}:{}".format(self.image, tag)) |  | ||||||
|  |  | ||||||
|             image.tag(self.image, tag='latest') |  | ||||||
|             self.process_metadata(image.attrs, force=True) |  | ||||||
|         except docker.errors.APIError as err: |  | ||||||
|             _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Tag image %s with version %s as latest", self.image, tag) |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    def exists(self):
        """Return True if docker image exists in local repo.

        Returns a Future.
        """
        return self.loop.run_in_executor(None, self._exists)
|  |  | ||||||
|     def _exists(self): |  | ||||||
|         """Return True if docker image exists in local repo. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             self.docker.images.get(self.image) |  | ||||||
|         except docker.errors.DockerException: |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    def is_running(self):
        """Return True if docker is Running.

        Return a Future running self._is_running in the default executor.
        """
        return self.loop.run_in_executor(None, self._is_running)
|  |  | ||||||
|     def _is_running(self): |  | ||||||
|         """Return True if docker is Running. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             container = self.docker.containers.get(self.name) |  | ||||||
|             image = self.docker.images.get(self.image) |  | ||||||
|         except docker.errors.DockerException: |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         # container is not running |  | ||||||
|         if container.status != 'running': |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         # we run on a old image, stop and start it |  | ||||||
|         if container.image.id != image.id: |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    @docker_process
    def attach(self):
        """Attach to running docker container.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._attach)
|  |  | ||||||
|     def _attach(self): |  | ||||||
|         """Attach to running docker container. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             if self.image: |  | ||||||
|                 obj_data = self.docker.images.get(self.image).attrs |  | ||||||
|             else: |  | ||||||
|                 obj_data = self.docker.containers.get(self.name).attrs |  | ||||||
|         except docker.errors.DockerException: |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         self.process_metadata(obj_data) |  | ||||||
|         _LOGGER.info( |  | ||||||
|             "Attach to image %s with version %s", self.image, self.version) |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    @docker_process
    def run(self):
        """Run docker image.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._run)
|  |  | ||||||
    def _run(self):
        """Run docker image.

        Abstract: subclasses must implement the actual container start.
        Need run inside executor.
        """
        raise NotImplementedError()
|  |  | ||||||
    @docker_process
    def stop(self):
        """Stop/remove docker container.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._stop)
|  |  | ||||||
    def _stop(self):
        """Stop/remove and remove docker container.

        Returns False when the container does not exist; True otherwise,
        even if stop/remove individually failed (errors are suppressed
        as best-effort cleanup).

        Need run inside executor.
        """
        try:
            container = self.docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        if container.status == 'running':
            _LOGGER.info("Stop %s docker application", self.image)
            with suppress(docker.errors.DockerException):
                container.stop(timeout=self.timeout)

        # always force-remove, also for a container that failed to stop
        with suppress(docker.errors.DockerException):
            _LOGGER.info("Clean %s docker application", self.image)
            container.remove(force=True)

        return True
|  |  | ||||||
    @docker_process
    def remove(self):
        """Remove docker images.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._remove)
|  |  | ||||||
|     def _remove(self): |  | ||||||
|         """remove docker images. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         # cleanup container |  | ||||||
|         self._stop() |  | ||||||
|  |  | ||||||
|         _LOGGER.info( |  | ||||||
|             "Remove docker %s with latest and %s", self.image, self.version) |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             with suppress(docker.errors.ImageNotFound): |  | ||||||
|                 self.docker.images.remove( |  | ||||||
|                     image="{}:latest".format(self.image), force=True) |  | ||||||
|  |  | ||||||
|             with suppress(docker.errors.ImageNotFound): |  | ||||||
|                 self.docker.images.remove( |  | ||||||
|                     image="{}:{}".format(self.image, self.version), force=True) |  | ||||||
|  |  | ||||||
|         except docker.errors.DockerException as err: |  | ||||||
|             _LOGGER.warning("Can't remove image %s -> %s", self.image, err) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         # clean metadata |  | ||||||
|         self.version = None |  | ||||||
|         self.arch = None |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    @docker_process
    def update(self, tag):
        """Update a docker image.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._update, tag)
|  |  | ||||||
|     def _update(self, tag): |  | ||||||
|         """Update a docker image. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         _LOGGER.info( |  | ||||||
|             "Update docker %s with %s:%s", self.version, self.image, tag) |  | ||||||
|  |  | ||||||
|         # update docker image |  | ||||||
|         if not self._install(tag): |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         # stop container & cleanup |  | ||||||
|         self._stop() |  | ||||||
|         self._cleanup() |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    def logs(self):
        """Return docker logs of container.

        Return a Future (runs _logs on the default executor).
        """
        return self.loop.run_in_executor(None, self._logs)
|  |  | ||||||
|     def _logs(self): |  | ||||||
|         """Return docker logs of container. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             container = self.docker.containers.get(self.name) |  | ||||||
|         except docker.errors.DockerException: |  | ||||||
|             return b"" |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             return container.logs(tail=100, stdout=True, stderr=True) |  | ||||||
|         except docker.errors.DockerException as err: |  | ||||||
|             _LOGGER.warning("Can't grap logs from %s -> %s", self.image, err) |  | ||||||
|  |  | ||||||
    @docker_process
    def restart(self):
        """Restart docker container.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._restart)
|  |  | ||||||
|     def _restart(self): |  | ||||||
|         """Restart docker container. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             container = self.docker.containers.get(self.name) |  | ||||||
|         except docker.errors.DockerException: |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Restart %s", self.image) |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             container.restart(timeout=self.timeout) |  | ||||||
|         except docker.errors.DockerException as err: |  | ||||||
|             _LOGGER.warning("Can't restart %s -> %s", self.image, err) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    @docker_process
    def cleanup(self):
        """Check if old version exists and cleanup.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._cleanup)
|  |  | ||||||
|     def _cleanup(self): |  | ||||||
|         """Check if old version exists and cleanup. |  | ||||||
|  |  | ||||||
|         Need run inside executor. |  | ||||||
|         """ |  | ||||||
|         try: |  | ||||||
|             latest = self.docker.images.get(self.image) |  | ||||||
|         except docker.errors.DockerException: |  | ||||||
|             _LOGGER.warning("Can't find %s for cleanup", self.image) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         for image in self.docker.images.list(name=self.image): |  | ||||||
|             if latest.id == image.id: |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             with suppress(docker.errors.DockerException): |  | ||||||
|                 _LOGGER.info("Cleanup docker images: %s", image.tags) |  | ||||||
|                 self.docker.images.remove(image.id, force=True) |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    @docker_process
    def execute_command(self, command):
        """Create a temporary container and run command.

        Serialized by the docker_process lock; returns a coroutine.
        """
        return self.loop.run_in_executor(None, self._execute_command, command)
|  |  | ||||||
    def _execute_command(self, command):
        """Create a temporary container and run command.

        Abstract: subclasses must implement the actual run.
        Need run inside executor.
        """
        raise NotImplementedError()
| @@ -1,89 +0,0 @@ | |||||||
| """Internal network manager for HassIO.""" |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import docker |  | ||||||
|  |  | ||||||
| from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class DockerNetwork(object):
    """Internal HassIO Network.

    Wraps the docker network object and creates it on demand.
    """

    def __init__(self, dock):
        """Initialize internal hassio network."""
        self.docker = dock
        self.network = self._get_network()

    @property
    def name(self):
        """Return name of network."""
        return DOCKER_NETWORK

    @property
    def containers(self):
        """Return of connected containers from network."""
        return self.network.containers

    @property
    def gateway(self):
        """Return gateway of the network."""
        return DOCKER_NETWORK_MASK[1]

    @property
    def supervisor(self):
        """Return supervisor of the network."""
        return DOCKER_NETWORK_MASK[2]

    def _get_network(self):
        """Get HassIO network, creating it when missing."""
        try:
            return self.docker.networks.get(DOCKER_NETWORK)
        except docker.errors.NotFound:
            _LOGGER.info("Can't find HassIO network, create new network")

        # fixed subnet/gateway so known addresses can be handed out
        pool = docker.types.IPAMPool(
            subnet=str(DOCKER_NETWORK_MASK),
            gateway=str(self.gateway),
            iprange=str(DOCKER_NETWORK_RANGE),
        )

        return self.docker.networks.create(
            DOCKER_NETWORK,
            driver='bridge',
            ipam=docker.types.IPAMConfig(pool_configs=[pool]),
            options={
                "com.docker.network.bridge.name": DOCKER_NETWORK,
            })

    def attach_container(self, container, alias=None, ipv4=None):
        """Attach container to hassio network.

        Need run inside executor.
        """
        address = str(ipv4) if ipv4 else None

        try:
            self.network.connect(
                container, aliases=alias, ipv4_address=address)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't link container to hassio-net -> %s", err)
            return False

        # refresh cached attributes (e.g. containers list)
        self.network.reload()
        return True

    def detach_default_bridge(self, container):
        """Detach default docker bridge.

        Need run inside executor.
        """
        try:
            self.docker.networks.get('bridge').disconnect(container)

        except docker.errors.NotFound:
            # not connected to the default bridge; nothing to do
            return

        except docker.errors.APIError as err:
            _LOGGER.warning(
                "Can't disconnect container from default -> %s", err)
| @@ -1,77 +0,0 @@ | |||||||
| """Init file for HassIO docker object.""" |  | ||||||
| import logging |  | ||||||
| import os |  | ||||||
|  |  | ||||||
| import docker |  | ||||||
|  |  | ||||||
| from .interface import DockerInterface |  | ||||||
| from .util import docker_process |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class DockerSupervisor(DockerInterface):
    """Docker hassio wrapper for the supervisor's own container.

    NOTE(review): the original docstring said "for HomeAssistant", but
    this class manages the supervisor container itself.
    """

    def __init__(self, config, loop, api, stop_callback, image=None):
        """Initialize docker base wrapper.

        stop_callback is stored for callers; it is not invoked here.
        """
        super().__init__(config, loop, api, image=image)
        self.stop_callback = stop_callback

    @property
    def name(self):
        """Return name of docker container.

        Injected by the host start script via the environment.
        """
        return os.environ['SUPERVISOR_NAME']

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            container = self.docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        self.process_metadata(container.attrs)
        _LOGGER.info("Attach to supervisor %s with version %s",
                     self.image, self.version)

        # if already attach
        if container in self.docker.network.containers:
            return True

        # attach to network with a fixed alias and the supervisor IP
        return self.docker.network.attach_container(
            container, alias=['hassio'], ipv4=self.docker.network.supervisor)

    @docker_process
    async def update(self, tag):
        """Update a supervisor docker image.

        Only pulls the new image; then stops the event loop so the
        supervisor exits and is presumably restarted by the host with
        the new image -- TODO confirm restart mechanism.
        """
        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

        if await self.loop.run_in_executor(None, self._install, tag):
            self.loop.call_later(1, self.loop.stop)
            return True

        return False

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def install(self, tag):
        """Pull docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def stop(self):
        """Stop/remove docker container."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def remove(self):
        """Remove docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def restart(self):
        """Restart docker container."""
        raise RuntimeError("Not support on supervisor docker container!")
| @@ -1,20 +0,0 @@ | |||||||
| """HassIO docker utilitys.""" |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
# pylint: disable=protected-access
def docker_process(method):
    """Wrap a coroutine method so only one docker task runs at a time.

    If the instance lock is already held, the call fails fast with
    False instead of queueing behind the running task.
    """
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api._lock.locked():
            # fixed typo in the log message ("excute" -> "execute")
            _LOGGER.error(
                "Can't execute %s while a task is in progress",
                method.__name__)
            return False

        async with api._lock:
            return await method(api, *args, **kwargs)

    return wrap_api
| @@ -1,121 +0,0 @@ | |||||||
| """Read hardware info from system.""" |  | ||||||
| from datetime import datetime |  | ||||||
| import logging |  | ||||||
| from pathlib import Path |  | ||||||
| import re |  | ||||||
|  |  | ||||||
| import pyudev |  | ||||||
|  |  | ||||||
| from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| ASOUND_CARDS = Path("/proc/asound/cards") |  | ||||||
| RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)") |  | ||||||
|  |  | ||||||
| ASOUND_DEVICES = Path("/proc/asound/devices") |  | ||||||
| RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)") |  | ||||||
|  |  | ||||||
| PROC_STAT = Path("/proc/stat") |  | ||||||
| RE_BOOT_TIME = re.compile(r"btime (\d+)") |  | ||||||
|  |  | ||||||
| GPIO_DEVICES = Path("/sys/class/gpio") |  | ||||||
| RE_TTY = re.compile(r"tty[A-Z]+") |  | ||||||
|  |  | ||||||
|  |  | ||||||
class Hardware(object):
    """Represent a interface to procfs, sysfs and udev."""

    def __init__(self):
        """Init hardware object."""
        # udev context used by all device-listing properties
        self.context = pyudev.Context()

    @property
    def serial_devices(self):
        """Return all serial and connected devices.

        Set of device node paths (e.g. tty devices with a vendor id or
        a ttyX-style name).
        """
        dev_list = set()
        for device in self.context.list_devices(subsystem='tty'):
            if 'ID_VENDOR' in device or RE_TTY.search(device.device_node):
                dev_list.add(device.device_node)

        return dev_list

    @property
    def input_devices(self):
        """Return all input devices.

        Set of device names (quotes stripped from the udev NAME value).
        """
        dev_list = set()
        for device in self.context.list_devices(subsystem='input'):
            if 'NAME' in device:
                dev_list.add(device['NAME'].replace('"', ''))

        return dev_list

    @property
    def disk_devices(self):
        """Return all disk devices.

        Set of /dev/sd* block device node paths.
        """
        dev_list = set()
        for device in self.context.list_devices(subsystem='block'):
            if device.device_node.startswith('/dev/sd'):
                dev_list.add(device.device_node)

        return dev_list

    @property
    def audio_devices(self):
        """Return all available audio interfaces.

        Dict keyed by card number with name/type/devices, parsed from
        /proc/asound. NOTE(review): returns None (implicit) when the
        asound files cannot be read -- callers must handle that.
        """
        try:
            with ASOUND_CARDS.open('r') as cards_file:
                cards = cards_file.read()
            with ASOUND_DEVICES.open('r') as devices_file:
                devices = devices_file.read()
        except OSError as err:
            _LOGGER.error("Can't read asound data -> %s", err)
            return

        audio_list = {}

        # parse cards
        for match in RE_CARDS.finditer(cards):
            audio_list[match.group(1)] = {
                ATTR_NAME: match.group(3),
                ATTR_TYPE: match.group(2),
                ATTR_DEVICES: {},
            }

        # parse devices; a device referencing an unknown card is skipped
        for match in RE_DEVICES.finditer(devices):
            try:
                audio_list[match.group(1)][ATTR_DEVICES][match.group(2)] = \
                    match.group(3)
            except KeyError:
                _LOGGER.warning("Wrong audio device found %s", match.group(0))
                continue

        return audio_list

    @property
    def gpio_devices(self):
        """Return list of GPIO interface on device.

        Set of names under /sys/class/gpio matching gpio*.
        """
        dev_list = set()
        for interface in GPIO_DEVICES.glob("gpio*"):
            dev_list.add(interface.name)

        return dev_list

    @property
    def last_boot(self):
        """Return last boot time.

        Naive UTC datetime parsed from the btime field of /proc/stat;
        None (implicit) when the file is unreadable or btime is missing.
        """
        try:
            with PROC_STAT.open("r") as stat_file:
                stats = stat_file.read()
        except OSError as err:
            _LOGGER.error("Can't read stat data -> %s", err)
            return

        # parse stat file
        found = RE_BOOT_TIME.search(stats)
        if not found:
            _LOGGER.error("Can't found last boot time!")
            return

        return datetime.utcfromtimestamp(int(found.group(1)))
| @@ -1,290 +0,0 @@ | |||||||
| """HomeAssistant control object.""" |  | ||||||
| import asyncio |  | ||||||
| import logging |  | ||||||
| import os |  | ||||||
| import re |  | ||||||
|  |  | ||||||
| import aiohttp |  | ||||||
| from aiohttp.hdrs import CONTENT_TYPE |  | ||||||
| import async_timeout |  | ||||||
|  |  | ||||||
| from .const import ( |  | ||||||
|     FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, |  | ||||||
|     ATTR_VERSION, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, |  | ||||||
|     HEADER_HA_ACCESS, CONTENT_TYPE_JSON) |  | ||||||
| from .dock.homeassistant import DockerHomeAssistant |  | ||||||
| from .tools import JsonConfig, convert_to_ascii |  | ||||||
| from .validate import SCHEMA_HASS_CONFIG |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml") |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class HomeAssistant(JsonConfig): |  | ||||||
|     """Hass core object for handle it.""" |  | ||||||
|  |  | ||||||
    def __init__(self, config, loop, docker, updater):
        """Initialize hass object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.config = config
        self.loop = loop
        self.updater = updater
        self.docker = DockerHomeAssistant(config, loop, docker, self)
        # home-assistant is reached through the hassio network gateway
        self.api_ip = docker.network.gateway
        # verify_ssl disabled -- presumably self-signed certificates on
        # the local instance; TODO confirm
        self.websession = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
|  |  | ||||||
    async def prepare(self):
        """Prepare HomeAssistant object.

        Installs the image when missing (landingpage first for the
        stock image), otherwise attaches to the existing docker.
        """
        if not await self.docker.exists():
            _LOGGER.info("No HomeAssistant docker %s found.", self.image)
            if self.is_custom_image:
                await self.install()
            else:
                await self.install_landingpage()
        else:
            await self.docker.attach()
|  |  | ||||||
    @property
    def api_port(self):
        """Return network port to home-assistant instance."""
        return self._data[ATTR_PORT]

    @api_port.setter
    def api_port(self, value):
        """Set network port for home-assistant instance and persist it."""
        self._data[ATTR_PORT] = value
        self.save()
|  |  | ||||||
    @property
    def api_password(self):
        """Return password for home-assistant instance (None if unset)."""
        return self._data.get(ATTR_PASSWORD)

    @api_password.setter
    def api_password(self, value):
        """Set password for home-assistant instance and persist it."""
        self._data[ATTR_PASSWORD] = value
        self.save()
|  |  | ||||||
    @property
    def api_ssl(self):
        """Return if we need ssl to home-assistant instance."""
        return self._data[ATTR_SSL]

    @api_ssl.setter
    def api_ssl(self, value):
        """Set SSL for home-assistant instance and persist it."""
        self._data[ATTR_SSL] = value
        self.save()
|  |  | ||||||
    @property
    def api_url(self):
        """Return API url to Home-Assistant.

        Built from the ssl flag, gateway IP and configured port.
        """
        return "{}://{}:{}".format(
            'https' if self.api_ssl else 'http', self.api_ip, self.api_port
        )
|  |  | ||||||
    @property
    def watchdog(self):
        """Return True if the watchdog should protect Home-Assistant."""
        return self._data[ATTR_WATCHDOG]

    @watchdog.setter
    def watchdog(self, value):
        """Set the watchdog option and persist it."""
        self._data[ATTR_WATCHDOG] = value
        self.save()
|  |  | ||||||
    @property
    def version(self):
        """Return version of running homeassistant (from docker metadata)."""
        return self.docker.version
|  |  | ||||||
    @property
    def last_version(self):
        """Return last available version of homeassistant.

        For a custom image this comes from local config; otherwise from
        the updater.
        """
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.updater.version_homeassistant
|  |  | ||||||
    @property
    def image(self):
        """Return image name of hass containter.

        Configured custom image wins; otherwise the environment default.
        """
        if ATTR_IMAGE in self._data:
            return self._data[ATTR_IMAGE]
        return os.environ['HOMEASSISTANT_REPOSITORY']
|  |  | ||||||
    @property
    def is_custom_image(self):
        """Return True if a custom image is used (set via set_custom)."""
        return ATTR_IMAGE in self._data
|  |  | ||||||
    @property
    def devices(self):
        """Return extend device mapping."""
        return self._data[ATTR_DEVICES]

    @devices.setter
    def devices(self, value):
        """Set extend device mapping and persist it."""
        self._data[ATTR_DEVICES] = value
        self.save()
|  |  | ||||||
    @property
    def boot(self):
        """Return True if home-assistant boot is enabled."""
        return self._data[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Set home-assistant boot options and persist them."""
        self._data[ATTR_BOOT] = value
        self.save()
|  |  | ||||||
    def set_custom(self, image, version):
        """Set a custom image for homeassistant.

        Passing image=None and version=None resets to the default image.
        Changes are persisted and the docker wrapper's image is updated.
        """
        # reset
        if image is None and version is None:
            self._data.pop(ATTR_IMAGE, None)
            self._data.pop(ATTR_VERSION, None)

            # self.image now resolves to the environment default again
            self.docker.image = self.image
        else:
            if image:
                self._data[ATTR_IMAGE] = image
                self.docker.image = image
            if version:
                self._data[ATTR_VERSION] = version
        self.save()
|  |  | ||||||
    async def install_landingpage(self):
        """Install a landingpage.

        Retries forever with a 60s backoff until the pull succeeds.
        """
        _LOGGER.info("Setup HomeAssistant landingpage")
        while True:
            if await self.docker.install('landingpage'):
                break
            _LOGGER.warning("Fails install landingpage, retry after 60sec")
            await asyncio.sleep(60, loop=self.loop)

        # run landingpage after installation
        await self.docker.run()
|  |  | ||||||
    async def install(self):
        """Install HomeAssistant.

        NOTE(review): the original docstring said "Install a
        landingpage" -- copy-paste error; this installs the real image.
        Retries forever with a 60s backoff until the pull succeeds.
        """
        _LOGGER.info("Setup HomeAssistant")
        while True:
            # read homeassistant tag and install it
            if not self.last_version:
                await self.updater.fetch_data()

            tag = self.last_version
            if tag and await self.docker.install(tag):
                break
            _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
            await asyncio.sleep(60, loop=self.loop)

        # finishing
        _LOGGER.info("HomeAssistant docker now installed")
        if self.boot:
            await self.docker.run()
        await self.docker.cleanup()
|  |  | ||||||
    async def update(self, version=None):
        """Update HomeAssistant version.

        Defaults to the latest known version; returns False when that
        version is already installed.
        """
        version = version or self.last_version
        # remember run state so we can restart after the image swap
        running = await self.docker.is_running()

        if version == self.docker.version:
            _LOGGER.warning("Version %s is already installed", version)
            return False

        try:
            return await self.docker.update(version)
        finally:
            # restart even when the update failed, to keep hass available
            if running:
                await self.docker.run()
|  |  | ||||||
    def run(self):
        """Run HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.run()
|  |  | ||||||
|     def stop(self): |  | ||||||
|         """Stop HomeAssistant docker. |  | ||||||
|  |  | ||||||
|         Return a coroutine. |  | ||||||
|         """ |  | ||||||
|         return self.docker.stop() |  | ||||||
|  |  | ||||||
|     def restart(self): |  | ||||||
|         """Restart HomeAssistant docker. |  | ||||||
|  |  | ||||||
|         Return a coroutine. |  | ||||||
|         """ |  | ||||||
|         return self.docker.restart() |  | ||||||
|  |  | ||||||
|     def logs(self): |  | ||||||
|         """Get HomeAssistant docker logs. |  | ||||||
|  |  | ||||||
|         Return a coroutine. |  | ||||||
|         """ |  | ||||||
|         return self.docker.logs() |  | ||||||
|  |  | ||||||
|     def is_running(self): |  | ||||||
|         """Return True if docker container is running. |  | ||||||
|  |  | ||||||
|         Return a coroutine. |  | ||||||
|         """ |  | ||||||
|         return self.docker.is_running() |  | ||||||
|  |  | ||||||
|     def is_initialize(self): |  | ||||||
|         """Return True if a docker container is exists. |  | ||||||
|  |  | ||||||
|         Return a coroutine. |  | ||||||
|         """ |  | ||||||
|         return self.docker.is_initialize() |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def in_progress(self): |  | ||||||
|         """Return True if a task is in progress.""" |  | ||||||
|         return self.docker.in_progress |  | ||||||
|  |  | ||||||
|     async def check_config(self): |  | ||||||
|         """Run homeassistant config check.""" |  | ||||||
|         exit_code, log = await self.docker.execute_command( |  | ||||||
|             "python3 -m homeassistant -c /config --script check_config" |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # if not valid |  | ||||||
|         if exit_code is None: |  | ||||||
|             return (False, "") |  | ||||||
|  |  | ||||||
|         # parse output |  | ||||||
|         log = convert_to_ascii(log) |  | ||||||
|         if exit_code != 0 or RE_YAML_ERROR.search(log): |  | ||||||
|             return (False, log) |  | ||||||
|         return (True, log) |  | ||||||
|  |  | ||||||
|     async def check_api_state(self): |  | ||||||
|         """Check if Home-Assistant up and running.""" |  | ||||||
|         url = "{}/api/".format(self.api_url) |  | ||||||
|         header = {CONTENT_TYPE: CONTENT_TYPE_JSON} |  | ||||||
|  |  | ||||||
|         if self.api_password: |  | ||||||
|             header.update({HEADER_HA_ACCESS: self.api_password}) |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             async with async_timeout.timeout(30, loop=self.loop): |  | ||||||
|                 async with self.websession.get(url, headers=header) as request: |  | ||||||
|                     status = request.status |  | ||||||
|  |  | ||||||
|         except (asyncio.TimeoutError, aiohttp.ClientError): |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         if status not in (200, 201): |  | ||||||
|             _LOGGER.warning("Home-Assistant API config missmatch") |  | ||||||
|         return True |  | ||||||
| @@ -1,124 +0,0 @@ | |||||||
| """Host control for HassIO.""" |  | ||||||
| import asyncio |  | ||||||
| import json |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import async_timeout |  | ||||||
|  |  | ||||||
| from .const import ( |  | ||||||
|     SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES, |  | ||||||
|     ATTR_HOSTNAME, ATTR_OS) |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| TIMEOUT = 15 |  | ||||||
| UNKNOWN = 'unknown' |  | ||||||
|  |  | ||||||
| FEATURES_SHUTDOWN = 'shutdown' |  | ||||||
| FEATURES_REBOOT = 'reboot' |  | ||||||
| FEATURES_UPDATE = 'update' |  | ||||||
| FEATURES_HOSTNAME = 'hostname' |  | ||||||
| FEATURES_NETWORK_INFO = 'network_info' |  | ||||||
| FEATURES_NETWORK_CONTROL = 'network_control' |  | ||||||
|  |  | ||||||
|  |  | ||||||
class HostControl(object):
    """Client for host control."""

    def __init__(self, loop):
        """Initialize HostControl socket client."""
        self.loop = loop
        self.version = UNKNOWN
        self.last_version = UNKNOWN
        self.type = UNKNOWN
        self.features = []
        self.hostname = UNKNOWN
        self.os_info = UNKNOWN

        # only talk to the host when the control socket actually exists
        self.active = SOCKET_HC.is_socket()

    async def _send_command(self, command):
        """Send command to host.

        Is a coroutine. Returns True/False/None for OK/ERROR/WRONG,
        parsed JSON for anything else, or None on failure.
        """
        if not self.active:
            return

        reader, writer = await asyncio.open_unix_connection(
            str(SOCKET_HC), loop=self.loop)

        try:
            _LOGGER.info("Send '%s' to HostControl.", command)

            with async_timeout.timeout(TIMEOUT, loop=self.loop):
                writer.write("{}\n".format(command).encode())
                data = await reader.readline()

            response = data.decode().rstrip()
            _LOGGER.info("Receive from HostControl: %s.", response)

            # fixed status words map straight to a result value
            status_map = {"OK": True, "ERROR": False, "WRONG": None}
            if response in status_map:
                return status_map[response]

            # everything else should be a JSON payload
            try:
                return json.loads(response)
            except json.JSONDecodeError:
                _LOGGER.warning("Json parse error from HostControl '%s'.",
                                response)

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout from HostControl!")

        finally:
            writer.close()

    async def load(self):
        """Load Info from host.

        Is a coroutine; fills the instance attributes from the host answer.
        """
        info = await self._send_command("info")
        if not info:
            return

        self.version = info.get(ATTR_VERSION, UNKNOWN)
        self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
        self.type = info.get(ATTR_TYPE, UNKNOWN)
        self.features = info.get(ATTR_FEATURES, [])
        self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
        self.os_info = info.get(ATTR_OS, UNKNOWN)

    def reboot(self):
        """Reboot the host system.

        Return a coroutine.
        """
        return self._send_command("reboot")

    def shutdown(self):
        """Shutdown the host system.

        Return a coroutine.
        """
        return self._send_command("shutdown")

    def update(self, version=None):
        """Update the host system, optionally to a specific version.

        Return a coroutine.
        """
        command = "update {}".format(version) if version else "update"
        return self._send_command(command)

    def set_hostname(self, hostname):
        """Update hostname on host."""
        return self._send_command("hostname {}".format(hostname))
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							
										
											Binary file not shown.
										
									
								
							
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							
										
											Binary file not shown.
										
									
								
							| @@ -1,56 +0,0 @@ | |||||||
"""Schedule for HassIO."""
import logging

_LOGGER = logging.getLogger(__name__)

SEC = 'seconds'
REPEAT = 'repeat'
CALL = 'callback'
TASK = 'task'


class Scheduler(object):
    """Schedule task inside HassIO."""

    def __init__(self, loop):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self.suspend = False

    def register_task(self, coro_callback, seconds, repeat=True,
                      now=False):
        """Schedule a coroutine to run every *seconds*.

        The coroutine needs to be a callback without arguments.
        Returns an id that keys the internal task table.
        """
        task_id = hash(coro_callback)

        # remember everything needed to (re)schedule this callback
        self._data[task_id] = {
            CALL: coro_callback,
            SEC: seconds,
            REPEAT: repeat,
        }

        if now:
            # fire immediately instead of waiting out the first interval
            self._run_task(task_id)
        else:
            handle = self.loop.call_later(seconds, self._run_task, task_id)
            self._data[task_id][TASK] = handle

        return task_id

    def _run_task(self, task_id):
        """Run a scheduled task and reschedule it when it repeats."""
        entry = self._data.pop(task_id)

        # a suspended scheduler skips the call but keeps repeats alive
        if not self.suspend:
            self.loop.create_task(entry[CALL]())

        if entry[REPEAT]:
            entry[TASK] = self.loop.call_later(
                entry[SEC], self._run_task, task_id)
            self._data[task_id] = entry
| @@ -1,314 +0,0 @@ | |||||||
| """Snapshot system control.""" |  | ||||||
| import asyncio |  | ||||||
| from datetime import datetime |  | ||||||
| import logging |  | ||||||
| from pathlib import Path |  | ||||||
| import tarfile |  | ||||||
|  |  | ||||||
| from .snapshot import Snapshot |  | ||||||
| from .util import create_slug |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL) |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class SnapshotsManager(object):
    """Manage creation, listing and restore of HassIO snapshots."""

    def __init__(self, config, loop, sheduler, addons, homeassistant):
        """Initialize a snapshot manager."""
        self.config = config
        self.loop = loop
        self.sheduler = sheduler
        self.addons = addons
        self.homeassistant = homeassistant
        self.snapshots = {}
        # serializes snapshot/restore operations; the explicit loop
        # argument of asyncio.Lock() was removed in Python 3.10
        self._lock = asyncio.Lock()

    @property
    def list_snapshots(self):
        """Return a set of all loaded snapshot objects."""
        return set(self.snapshots.values())

    def get(self, slug):
        """Return snapshot object for *slug*, or None if unknown."""
        return self.snapshots.get(slug)

    async def _wait_all(self, coros):
        """Run awaitables concurrently and wait until all are done.

        Uses asyncio.wait semantics: exceptions stay inside the tasks, so
        one failing job does not abort its siblings. Coroutines are wrapped
        in tasks explicitly (required since Python 3.11, and the ``loop``
        kwarg of asyncio.wait was removed in 3.10).
        """
        await asyncio.wait([self.loop.create_task(coro) for coro in coros])

    def _create_snapshot(self, name, sys_type):
        """Initialize a new snapshot object from *name* and return it."""
        date_str = datetime.utcnow().isoformat()
        slug = create_slug(name, date_str)
        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))

        # init object
        snapshot = Snapshot(self.config, self.loop, tar_file)
        snapshot.create(slug, name, date_str, sys_type)

        # set general data
        snapshot.snapshot_homeassistant(self.homeassistant)
        snapshot.repositories = self.config.addons_repositories

        return snapshot

    async def reload(self):
        """Scan the backup folder and (re)load all existing snapshots."""
        self.snapshots = {}

        async def _load_snapshot(tar_file):
            """Load a single snapshot file; keep it only if valid."""
            snapshot = Snapshot(self.config, self.loop, tar_file)
            if await snapshot.load():
                self.snapshots[snapshot.slug] = snapshot

        tasks = [_load_snapshot(tar_file) for tar_file in
                 self.config.path_backup.glob("*.tar")]

        _LOGGER.info("Found %d snapshot files", len(tasks))
        if tasks:
            await self._wait_all(tasks)

    def remove(self, snapshot):
        """Remove a snapshot file and forget it. Return True on success."""
        try:
            snapshot.tar_file.unlink()
            self.snapshots.pop(snapshot.slug, None)
        except OSError as err:
            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
            return False

        return True

    async def do_snapshot_full(self, name=""):
        """Create a full snapshot (all addons + all folders).

        Return True on success, False otherwise.
        """
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
        _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
        try:
            # pause scheduled jobs while the system is being archived
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot every installed addon
                tasks = []
                for addon in self.addons.list_addons:
                    if not addon.is_installed:
                        continue
                    tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Full-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await self._wait_all(tasks)

                # snapshot folders
                _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders()

            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_snapshot_partial(self, name="", addons=None, folders=None):
        """Create a partial snapshot of the given addon slugs and folders.

        Return True on success, False otherwise.
        """
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        addons = addons or []
        folders = folders or []
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)

        _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot only the requested (and installed) addons
                tasks = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon.is_installed:
                        tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Partial-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await self._wait_all(tasks)

                # snapshot the requested folders
                _LOGGER.info("Partial-Snapshot %s store folders %s",
                             snapshot.slug, folders)
                await snapshot.store_folders(folders)

            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_full(self, snapshot):
        """Restore the whole system from a full snapshot.

        Return True on success, False otherwise.
        """
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        if snapshot.sys_type != SNAPSHOT_FULL:
            _LOGGER.error(
                "Full-Restore %s is only a partial snapshot!", snapshot.slug)
            return False

        _LOGGER.info("Full-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # stop system before anything is overwritten
                tasks = []
                tasks.append(self.homeassistant.stop())

                for addon in self.addons.list_addons:
                    if addon.is_installed:
                        tasks.append(addon.stop())

                await self._wait_all(tasks)

                # restore folders
                _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
                await snapshot.restore_folders()

                # kick off homeassistant restore in the background while
                # repositories and addons are handled
                _LOGGER.info("Full-Restore %s restore Home-Assistant",
                             snapshot.slug)
                snapshot.restore_homeassistant(self.homeassistant)
                task_hass = self.loop.create_task(
                    self.homeassistant.update(snapshot.homeassistant_version))

                # restore repositories
                await self.addons.load_repositories(snapshot.repositories)

                # restore addons: remove what is installed but not in the
                # snapshot, then export everything the snapshot contains
                tasks = []
                actual_addons = \
                    set(addon.slug for addon in self.addons.list_addons
                        if addon.is_installed)
                restore_addons = \
                    set(data[ATTR_SLUG] for data in snapshot.addons)
                remove_addons = actual_addons - restore_addons

                _LOGGER.info("Full-Restore %s restore addons %s, remove %s",
                             snapshot.slug, restore_addons, remove_addons)

                for slug in remove_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(addon.uninstall())
                    else:
                        _LOGGER.warning("Can't remove addon %s", slug)

                for slug in restore_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                 snapshot.slug, len(tasks))
                    await self._wait_all(tasks)

                # finish homeassistant task
                _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                             snapshot.slug)
                await task_hass
                await self.homeassistant.run()

            _LOGGER.info("Full-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            # bugfix: this previously referenced an undefined name `slug`
            _LOGGER.info("Full-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_partial(self, snapshot, homeassistant=False,
                                 addons=None, folders=None):
        """Restore selected parts (addons/folders/homeassistant) of a snapshot.

        Return True on success, False otherwise.
        """
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        addons = addons or []
        folders = folders or []

        _LOGGER.info("Partial-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                tasks = []

                # stop homeassistant only when its own folder gets replaced
                if FOLDER_HOMEASSISTANT in folders:
                    await self.homeassistant.stop()

                if folders:
                    _LOGGER.info("Partial-Restore %s restore folders %s",
                                 snapshot.slug, folders)
                    await snapshot.restore_folders(folders)

                if homeassistant:
                    _LOGGER.info("Partial-Restore %s restore Home-Assistant",
                                 snapshot.slug)
                    snapshot.restore_homeassistant(self.homeassistant)
                    tasks.append(self.homeassistant.update(
                        snapshot.homeassistant_version))

                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Partial-Restore %s run %d tasks",
                                 snapshot.slug, len(tasks))
                    await self._wait_all(tasks)

                # make sure homeassistant run agen
                await self.homeassistant.run()

            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            # bugfix: this previously referenced an undefined name `slug`
            _LOGGER.info("Partial-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()
| @@ -1,368 +0,0 @@ | |||||||
| """Represent a snapshot file.""" |  | ||||||
| import asyncio |  | ||||||
| import json |  | ||||||
| import logging |  | ||||||
| from pathlib import Path |  | ||||||
| import tarfile |  | ||||||
| from tempfile import TemporaryDirectory |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
| from voluptuous.humanize import humanize_error |  | ||||||
|  |  | ||||||
| from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS |  | ||||||
| from .util import remove_folder |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES, |  | ||||||
|     ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES, |  | ||||||
|     ATTR_IMAGE, ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT) |  | ||||||
| from ..tools import write_json_file |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Snapshot(object): |  | ||||||
|     """A signle hassio snapshot.""" |  | ||||||
|  |  | ||||||
|     def __init__(self, config, loop, tar_file): |  | ||||||
|         """Initialize a snapshot.""" |  | ||||||
|         self.loop = loop |  | ||||||
|         self.config = config |  | ||||||
|         self.tar_file = tar_file |  | ||||||
|         self._data = {} |  | ||||||
|         self._tmp = None |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def slug(self): |  | ||||||
|         """Return snapshot slug.""" |  | ||||||
|         return self._data.get(ATTR_SLUG) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def sys_type(self): |  | ||||||
|         """Return snapshot type.""" |  | ||||||
|         return self._data.get(ATTR_TYPE) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def name(self): |  | ||||||
|         """Return snapshot name.""" |  | ||||||
|         return self._data[ATTR_NAME] |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def date(self): |  | ||||||
|         """Return snapshot date.""" |  | ||||||
|         return self._data[ATTR_DATE] |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def addons(self): |  | ||||||
|         """Return snapshot date.""" |  | ||||||
|         return self._data[ATTR_ADDONS] |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def folders(self): |  | ||||||
|         """Return list of saved folders.""" |  | ||||||
|         return self._data[ATTR_FOLDERS] |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def repositories(self): |  | ||||||
|         """Return snapshot date.""" |  | ||||||
|         return self._data[ATTR_REPOSITORIES] |  | ||||||
|  |  | ||||||
|     @repositories.setter |  | ||||||
|     def repositories(self, value): |  | ||||||
|         """Set snapshot date.""" |  | ||||||
|         self._data[ATTR_REPOSITORIES] = value |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def homeassistant_version(self): |  | ||||||
|         """Return snapshot homeassistant version.""" |  | ||||||
|         return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION) |  | ||||||
|  |  | ||||||
|     @homeassistant_version.setter |  | ||||||
|     def homeassistant_version(self, value): |  | ||||||
|         """Set snapshot homeassistant version.""" |  | ||||||
|         self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def homeassistant_devices(self): |  | ||||||
|         """Return snapshot homeassistant devices.""" |  | ||||||
|         return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES) |  | ||||||
|  |  | ||||||
|     @homeassistant_devices.setter |  | ||||||
|     def homeassistant_devices(self, value): |  | ||||||
|         """Set snapshot homeassistant devices.""" |  | ||||||
|         self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def homeassistant_image(self): |  | ||||||
|         """Return snapshot homeassistant custom image.""" |  | ||||||
|         return self._data[ATTR_HOMEASSISTANT].get(ATTR_IMAGE) |  | ||||||
|  |  | ||||||
|     @homeassistant_image.setter |  | ||||||
|     def homeassistant_image(self, value): |  | ||||||
|         """Set snapshot homeassistant custom image.""" |  | ||||||
|         self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def homeassistant_ssl(self): |  | ||||||
|         """Return snapshot homeassistant api ssl.""" |  | ||||||
|         return self._data[ATTR_HOMEASSISTANT].get(ATTR_SSL) |  | ||||||
|  |  | ||||||
|     @homeassistant_ssl.setter |  | ||||||
|     def homeassistant_ssl(self, value): |  | ||||||
|         """Set snapshot homeassistant api ssl.""" |  | ||||||
|         self._data[ATTR_HOMEASSISTANT][ATTR_SSL] = value |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def homeassistant_port(self): |  | ||||||
|         """Return snapshot homeassistant api port.""" |  | ||||||
|         return self._data[ATTR_HOMEASSISTANT].get(ATTR_PORT) |  | ||||||
|  |  | ||||||
|     @homeassistant_port.setter |  | ||||||
|     def homeassistant_port(self, value): |  | ||||||
|         """Set snapshot homeassistant api port.""" |  | ||||||
|         self._data[ATTR_HOMEASSISTANT][ATTR_PORT] = value |  | ||||||
|  |  | ||||||
    @property
    def homeassistant_password(self):
        """Return the stored Home Assistant API password (None when not stored)."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_PASSWORD)

    @homeassistant_password.setter
    def homeassistant_password(self, value):
        """Store the Home Assistant API password in the snapshot data."""
        self._data[ATTR_HOMEASSISTANT][ATTR_PASSWORD] = value
|  |  | ||||||
    @property
    def homeassistant_watchdog(self):
        """Return the stored Home Assistant watchdog flag (None when not stored)."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_WATCHDOG)

    @homeassistant_watchdog.setter
    def homeassistant_watchdog(self, value):
        """Store the Home Assistant watchdog flag in the snapshot data."""
        self._data[ATTR_HOMEASSISTANT][ATTR_WATCHDOG] = value
|  |  | ||||||
    @property
    def homeassistant_boot(self):
        """Return the stored Home Assistant boot flag (None when not stored)."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_BOOT)

    @homeassistant_boot.setter
    def homeassistant_boot(self, value):
        """Store the Home Assistant boot flag in the snapshot data."""
        self._data[ATTR_HOMEASSISTANT][ATTR_BOOT] = value
|  |  | ||||||
|     @property |  | ||||||
|     def size(self): |  | ||||||
|         """Return snapshot size.""" |  | ||||||
|         if not self.tar_file.is_file(): |  | ||||||
|             return 0 |  | ||||||
|         return self.tar_file.stat().st_size / 1048576  # calc mbyte |  | ||||||
|  |  | ||||||
|     def create(self, slug, name, date, sys_type): |  | ||||||
|         """Initialize a new snapshot.""" |  | ||||||
|         # init metadata |  | ||||||
|         self._data[ATTR_SLUG] = slug |  | ||||||
|         self._data[ATTR_NAME] = name |  | ||||||
|         self._data[ATTR_DATE] = date |  | ||||||
|         self._data[ATTR_TYPE] = sys_type |  | ||||||
|  |  | ||||||
|         # init other constructs |  | ||||||
|         self._data[ATTR_HOMEASSISTANT] = {} |  | ||||||
|         self._data[ATTR_ADDONS] = [] |  | ||||||
|         self._data[ATTR_REPOSITORIES] = [] |  | ||||||
|         self._data[ATTR_FOLDERS] = [] |  | ||||||
|  |  | ||||||
    def snapshot_homeassistant(self, homeassistant):
        """Copy settings from the Home Assistant object into this snapshot.

        Stores version, devices, watchdog/boot flags, the API settings and
        (only for custom installs) the image name.
        """
        self.homeassistant_version = homeassistant.version
        self.homeassistant_devices = homeassistant.devices
        self.homeassistant_watchdog = homeassistant.watchdog
        self.homeassistant_boot = homeassistant.boot

        # custom image: only stored when a non-default image is in use
        if homeassistant.is_custom_image:
            self.homeassistant_image = homeassistant.image

        # api
        self.homeassistant_port = homeassistant.api_port
        self.homeassistant_ssl = homeassistant.api_ssl
        self.homeassistant_password = homeassistant.api_password
|  |  | ||||||
    def restore_homeassistant(self, homeassistant):
        """Write the stored snapshot settings back onto the Home Assistant object.

        NOTE(review): the stored version is only applied via set_custom()
        when a custom image was snapshotted; for stock installs the current
        version is left untouched here — confirm that is intended upstream.
        """
        homeassistant.devices = self.homeassistant_devices
        homeassistant.watchdog = self.homeassistant_watchdog
        homeassistant.boot = self.homeassistant_boot

        # custom image: restore the image name and its pinned version
        if self.homeassistant_image:
            homeassistant.set_custom(
                self.homeassistant_image, self.homeassistant_version)

        # api
        homeassistant.api_port = self.homeassistant_port
        homeassistant.api_ssl = self.homeassistant_ssl
        homeassistant.api_password = self.homeassistant_password
|  |  | ||||||
    async def load(self):
        """Read snapshot.json from tar file.

        Returns True when the metadata was read, parsed and validated,
        False on any failure (each failure path logs its own error).
        """
        if not self.tar_file.is_file():
            _LOGGER.error("No tarfile %s", self.tar_file)
            return False

        def _load_file():
            """Read snapshot.json out of the tarball (blocking)."""
            with tarfile.open(self.tar_file, "r:") as snapshot:
                json_file = snapshot.extractfile("./snapshot.json")
                return json_file.read()

        # read snapshot.json — blocking tar access runs in the executor
        try:
            raw = await self.loop.run_in_executor(None, _load_file)
        except (tarfile.TarError, KeyError) as err:
            _LOGGER.error(
                "Can't read snapshot tarfile %s -> %s", self.tar_file, err)
            return False

        # parse data
        try:
            raw_dict = json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
            return False

        # validate against the snapshot schema
        try:
            self._data = SCHEMA_SNAPSHOT(raw_dict)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
                          humanize_error(raw_dict, err))
            return False

        return True
|  |  | ||||||
|     async def __aenter__(self): |  | ||||||
|         """Async context to open a snapshot.""" |  | ||||||
|         self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp)) |  | ||||||
|  |  | ||||||
|         # create a snapshot |  | ||||||
|         if not self.tar_file.is_file(): |  | ||||||
|             return self |  | ||||||
|  |  | ||||||
|         # extract a exists snapshot |  | ||||||
|         def _extract_snapshot(): |  | ||||||
|             """Extract a snapshot.""" |  | ||||||
|             with tarfile.open(self.tar_file, "r:") as tar: |  | ||||||
|                 tar.extractall(path=self._tmp.name) |  | ||||||
|  |  | ||||||
|         await self.loop.run_in_executor(None, _extract_snapshot) |  | ||||||
|  |  | ||||||
    async def __aexit__(self, exception_type, exception_value, traceback):
        """Async context to close a snapshot.

        For an existing snapshot (or after an exception) only the temp dir
        is cleaned up. For a new snapshot the collected data is validated
        and packed into the tarball.
        """
        # exists snapshot or exception on build
        if self.tar_file.is_file() or exception_type is not None:
            self._tmp.cleanup()
            return

        # validate data
        try:
            self._data = SCHEMA_SNAPSHOT(self._data)
        except vol.Invalid as err:
            _LOGGER.error("Invalid data for %s -> %s", self.tar_file,
                          humanize_error(self._data, err))
            raise ValueError("Invalid config") from None

        # new snapshot, build it
        def _create_snapshot():
            """Create a new snapshot tarball (blocking)."""
            with tarfile.open(self.tar_file, "w:") as tar:
                tar.add(self._tmp.name, arcname=".")

        # snapshot.json must be written into the temp dir before the tar is
        # assembled so it ends up inside the archive
        if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
            await self.loop.run_in_executor(None, _create_snapshot)
        else:
            _LOGGER.error("Can't write snapshot.json")

        self._tmp.cleanup()
|  |  | ||||||
|     async def import_addon(self, addon): |  | ||||||
|         """Add a addon into snapshot.""" |  | ||||||
|         snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug)) |  | ||||||
|  |  | ||||||
|         if not await addon.snapshot(snapshot_file): |  | ||||||
|             _LOGGER.error("Can't make snapshot from %s", addon.slug) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         # store to config |  | ||||||
|         self._data[ATTR_ADDONS].append({ |  | ||||||
|             ATTR_SLUG: addon.slug, |  | ||||||
|             ATTR_NAME: addon.name, |  | ||||||
|             ATTR_VERSION: addon.version_installed, |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     async def export_addon(self, addon): |  | ||||||
|         """Restore a addon from snapshot.""" |  | ||||||
|         snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug)) |  | ||||||
|  |  | ||||||
|         if not await addon.restore(snapshot_file): |  | ||||||
|             _LOGGER.error("Can't restore snapshot for %s", addon.slug) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
    async def store_folders(self, folder_list=None):
        """Backup hassio data folders into the snapshot.

        folder_list: subset of ALL_FOLDERS to store; defaults to all.
        """
        folder_list = folder_list or ALL_FOLDERS

        def _folder_save(name):
            """Internal function to snapshot a folder (blocking)."""
            # folder names may contain '/', which is invalid in a file name
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            try:
                _LOGGER.info("Snapshot folder %s", name)
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                    tar_file.add(origin_dir, arcname=".")
                    _LOGGER.info("Snapshot folder %s done", name)

                # only record folders that were archived successfully
                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
|  |  | ||||||
    async def restore_folders(self, folder_list=None):
        """Restore hassio data folders from the snapshot.

        folder_list: subset of ALL_FOLDERS to restore; defaults to all.
        """
        folder_list = folder_list or ALL_FOLDERS

        def _folder_restore(name):
            """Internal function to restore a folder (blocking)."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            # clean old stuff
            if origin_dir.is_dir():
                remove_folder(origin_dir)

            try:
                _LOGGER.info("Restore folder %s", name)
                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                    # NOTE(review): extractall() without member filtering
                    # trusts the archive's paths — confirm snapshots are
                    # always locally produced before accepting uploads.
                    tar_file.extractall(path=origin_dir)
                    _LOGGER.info("Restore folder %s done", name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't restore folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
| @@ -1,21 +0,0 @@ | |||||||
| """Util addons functions.""" |  | ||||||
| import hashlib |  | ||||||
| import shutil |  | ||||||
|  |  | ||||||
|  |  | ||||||
def create_slug(name, date_str):
    """Generate a short, stable hash slug from a snapshot name and date."""
    fingerprint = f"{date_str} - {name}".lower().encode()
    return hashlib.sha1(fingerprint).hexdigest()[:8]
|  |  | ||||||
|  |  | ||||||
def remove_folder(folder):
    """Remove folder data but not the folder itself.

    Deletion errors are ignored on purpose: a partial cleanup is better
    than aborting the caller.
    """
    for entry in folder.iterdir():
        try:
            if entry.is_dir():
                shutil.rmtree(str(entry), ignore_errors=True)
                continue
            entry.unlink()
        except (OSError, shutil.Error):
            pass
| @@ -1,38 +0,0 @@ | |||||||
| """Validate some things around restore.""" |  | ||||||
|  |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| from ..const import ( |  | ||||||
|     ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE, |  | ||||||
|     ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES, |  | ||||||
|     ATTR_IMAGE, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, |  | ||||||
|     FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL, |  | ||||||
|     SNAPSHOT_FULL, SNAPSHOT_PARTIAL) |  | ||||||
| from ..validate import HASS_DEVICES, NETWORK_PORT |  | ||||||
|  |  | ||||||
# Every hassio data folder that can be part of a snapshot.
ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

# pylint: disable=no-value-for-parameter
# Schema of the snapshot.json metadata file stored inside a snapshot
# tarball. extra=ALLOW_EXTRA keeps unknown keys, so snapshots written by
# newer versions stay loadable.
SCHEMA_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_DATE): vol.Coerce(str),
    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
        vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    })],
    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
}, extra=vol.ALLOW_EXTRA)
							
								
								
									
										115
									
								
								hassio/tasks.py
									
									
									
									
									
								
							
							
						
						
									
										115
									
								
								hassio/tasks.py
									
									
									
									
									
								
							| @@ -1,115 +0,0 @@ | |||||||
| """Multible tasks.""" |  | ||||||
| import asyncio |  | ||||||
| from datetime import datetime |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def api_sessions_cleanup(config):
    """Create scheduler task for cleanup api sessions."""
    async def _api_sessions_cleanup():
        """Drop every api session whose validity time has passed."""
        current_time = datetime.now()
        for session, until_valid in config.security_sessions.items():
            if until_valid <= current_time:
                config.drop_security_session(session)

    return _api_sessions_cleanup
|  |  | ||||||
|  |  | ||||||
def addons_update(loop, addons):
    """Create scheduler task for auto-updating add-ons."""
    async def _addons_update():
        """Update every installed add-on that has auto-update enabled."""
        tasks = []
        for addon in addons.list_addons:
            # only installed add-ons that opted in to auto updates
            if not addon.is_installed or not addon.auto_update:
                continue

            # already up to date
            if addon.version_installed == addon.last_version:
                continue

            # NOTE(review): 'test_udpate_schema' is a (misspelled) add-on
            # API name defined elsewhere; any rename must start there.
            if addon.test_udpate_schema():
                tasks.append(addon.update())
            else:
                _LOGGER.warning(
                    "Addon %s will be ignore, schema tests fails", addon.slug)

        if tasks:
            _LOGGER.info("Addon auto update process %d tasks", len(tasks))
            await asyncio.wait(tasks, loop=loop)

    return _addons_update
|  |  | ||||||
|  |  | ||||||
def hassio_update(supervisor, updater):
    """Create scheduler task for updating the hassio supervisor."""
    async def _hassio_update():
        """Check for and run a supervisor update."""
        # refresh version info (the updater throttles real fetches itself)
        await updater.fetch_data()
        if updater.version_hassio == supervisor.version:
            return

        # don't perform a update on beta/dev channel
        if updater.beta_channel:
            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
            return

        _LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
        await supervisor.update(updater.version_hassio)

    return _hassio_update
|  |  | ||||||
|  |  | ||||||
def homeassistant_watchdog_docker(loop, homeassistant):
    """Create scheduler task for monitoring the Home Assistant container."""
    async def _homeassistant_watchdog_docker():
        """Start the container again when it is found stopped."""
        # skip when Home Assistant was never initialized or the watchdog
        # is disabled
        if not await homeassistant.is_initialize() or \
                not homeassistant.watchdog:
            return

        # nothing to do while it is running (or an operation is in progress)
        if homeassistant.in_progress or await homeassistant.is_running():
            return

        # container is down: schedule the restart first, then log
        loop.create_task(homeassistant.run())
        _LOGGER.error("Watchdog found a problem with Home-Assistant docker!")

    return _homeassistant_watchdog_docker
|  |  | ||||||
|  |  | ||||||
def homeassistant_watchdog_api(loop, homeassistant):
    """Create scheduler task for monitoring the Home Assistant API.

    Allow one missed API check before restarting Home-Assistant, so a
    transient delay in our system does not trigger a needless restart.
    """
    # number of *consecutive* missed API checks
    retry_scan = 0

    async def _homeassistant_watchdog_api():
        """Check the API state and restart Home-Assistant if it stays down."""
        nonlocal retry_scan

        # skip when Home Assistant was never initialized or the watchdog
        # is disabled
        if not await homeassistant.is_initialize() or \
                not homeassistant.watchdog:
            return

        # API reachable (or an operation already in progress) -> healthy
        if homeassistant.in_progress or await homeassistant.check_api_state():
            # Bug fix: reset the miss counter on a healthy check so only
            # consecutive misses trigger a restart. Previously one old
            # transient miss made the next miss — possibly much later —
            # restart immediately instead of warning first.
            retry_scan = 0
            return
        retry_scan += 1

        # first consecutive miss: warn and give it one more scan
        if retry_scan == 1:
            _LOGGER.warning("Watchdog miss API response from Home-Assistant")
            return

        loop.create_task(homeassistant.restart())
        _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
        retry_scan = 0

    return _homeassistant_watchdog_api
							
								
								
									
										167
									
								
								hassio/tools.py
									
									
									
									
									
								
							
							
						
						
									
										167
									
								
								hassio/tools.py
									
									
									
									
									
								
							| @@ -1,167 +0,0 @@ | |||||||
| """Tools file for HassIO.""" |  | ||||||
| import asyncio |  | ||||||
| from contextlib import suppress |  | ||||||
| from datetime import datetime, timedelta, timezone |  | ||||||
| import json |  | ||||||
| import logging |  | ||||||
| import re |  | ||||||
|  |  | ||||||
| import aiohttp |  | ||||||
| import async_timeout |  | ||||||
| import pytz |  | ||||||
| import voluptuous as vol |  | ||||||
| from voluptuous.humanize import humanize_error |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| FREEGEOIP_URL = "https://freegeoip.io/json/" |  | ||||||
|  |  | ||||||
| RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))") |  | ||||||
|  |  | ||||||
| # Copyright (c) Django Software Foundation and individual contributors. |  | ||||||
| # All rights reserved. |  | ||||||
| # https://github.com/django/django/blob/master/LICENSE |  | ||||||
| DATETIME_RE = re.compile( |  | ||||||
|     r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})' |  | ||||||
|     r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})' |  | ||||||
|     r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' |  | ||||||
|     r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$' |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def write_json_file(jsonfile, data):
    """Serialize *data* as JSON and write it to Path *jsonfile*.

    Returns True on success, False on any write or serialization error.
    """
    try:
        json_str = json.dumps(data, indent=2)
        with jsonfile.open('w') as conf_file:
            conf_file.write(json_str)
    except (OSError, TypeError, ValueError):
        # Bug fix: json.dumps raises TypeError (unserializable object) or
        # ValueError (e.g. circular reference) — never JSONDecodeError.
        # The old `except (OSError, json.JSONDecodeError)` let those
        # escape instead of returning False as callers expect.
        return False

    return True
|  |  | ||||||
|  |  | ||||||
def read_json_file(jsonfile):
    """Read a json file and return the parsed data."""
    with jsonfile.open('r') as cfile:
        return json.load(cfile)
|  |  | ||||||
|  |  | ||||||
async def fetch_timezone(websession):
    """Read timezone from freegeoip.

    Best effort: returns 'UTC' when the service is unreachable, times out
    or the payload is malformed.
    """
    data = {}
    # suppress() makes any network/JSON failure leave `data` empty
    with suppress(aiohttp.ClientError, asyncio.TimeoutError,
                  json.JSONDecodeError, KeyError):
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(FREEGEOIP_URL) as request:
                data = await request.json()

    return data.get('time_zone', 'UTC')
|  |  | ||||||
|  |  | ||||||
def convert_to_ascii(raw):
    """Decode *raw* bytes and strip ANSI escape/color sequences."""
    text = raw.decode()
    return RE_STRING.sub("", text)
|  |  | ||||||
|  |  | ||||||
| # Copyright (c) Django Software Foundation and individual contributors. |  | ||||||
| # All rights reserved. |  | ||||||
| # https://github.com/django/django/blob/master/LICENSE |  | ||||||
def parse_datetime(dt_str):
    """Parse a string and return a datetime.datetime.

    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.
    Raises ValueError if the input is well formatted but not a valid datetime.
    Returns None if the input isn't well formatted.
    """
    match = DATETIME_RE.match(dt_str)
    if match is None:
        return None
    parts = match.groupdict()
    # pad a fractional-second group like ".5" out to full microseconds
    if parts['microsecond']:
        parts['microsecond'] = parts['microsecond'].ljust(6, '0')
    tzinfo_str = parts.pop('tzinfo')

    tzinfo = None
    if tzinfo_str == 'Z':
        tzinfo = pytz.utc
    elif tzinfo_str is not None:
        # "+HH", "+HHMM" or "+HH:MM" style offsets
        offset_hours = int(tzinfo_str[1:3])
        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
        delta = timedelta(hours=offset_hours, minutes=offset_mins)
        if tzinfo_str.startswith('-'):
            delta = -delta
        tzinfo = timezone(delta)

    kwargs = {key: int(val) for key, val in parts.items() if val is not None}
    kwargs['tzinfo'] = tzinfo
    return datetime(**kwargs)
|  |  | ||||||
|  |  | ||||||
class JsonConfig(object):
    """Persist a schema-validated dict in a JSON file."""

    def __init__(self, json_file, schema):
        """Initialize the config object.

        json_file: Path of the backing JSON file.
        schema: voluptuous schema used to validate the data.
        """
        self._file = json_file
        self._schema = schema
        self._data = {}

        # init or load data
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}

        # validate; on failure fall back to the schema defaults rather
        # than keeping invalid data around
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse %s -> %s",
                          self._file, humanize_error(self._data, ex))
            # reset data to default
            self._data = self._schema({})

    def save(self):
        """Validate and store data to the config file.

        Returns True on success, False if validation or writing failed.
        """
        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data -> %s",
                          humanize_error(self._data, ex))
            return False

        # write
        if not write_json_file(self._file, self._data):
            _LOGGER.error("Can't store config in %s", self._file)
            return False
        return True
|  |  | ||||||
|  |  | ||||||
class AsyncThrottle(object):
    """
    Decorator that prevents a coroutine from being called more than once
    every time period; throttled calls are skipped and return None.
    """
    def __init__(self, delta):
        """Initialize async throttle with the minimum interval *delta*."""
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Wrap *method* so rapid repeat calls are skipped."""
        async def wrapper(*args, **kwargs):
            """Run *method* only when the throttle period has elapsed."""
            call_time = datetime.now()
            elapsed = call_time - self.time_of_last_call
            if elapsed > self.throttle_period:
                self.time_of_last_call = call_time
                return await method(*args, **kwargs)

        return wrapper
| @@ -1,86 +0,0 @@ | |||||||
| """Fetch last versions from webserver.""" |  | ||||||
| import asyncio |  | ||||||
| from datetime import timedelta |  | ||||||
| import json |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| import aiohttp |  | ||||||
| import async_timeout |  | ||||||
|  |  | ||||||
| from .const import ( |  | ||||||
|     URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO, |  | ||||||
|     ATTR_BETA_CHANNEL) |  | ||||||
| from .tools import AsyncThrottle, JsonConfig |  | ||||||
| from .validate import SCHEMA_UPDATER_CONFIG |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class Updater(JsonConfig):
    """Fetch last versions from version.json."""

    def __init__(self, config, loop, websession):
        """Initialize updater with persisted updater config."""
        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession

    @property
    def version_homeassistant(self):
        """Return last known homeassistant version (None if never fetched)."""
        return self._data.get(ATTR_HOMEASSISTANT)

    @property
    def version_hassio(self):
        """Return last known hassio version (None if never fetched)."""
        return self._data.get(ATTR_HASSIO)

    @property
    def upstream(self):
        """Return the upstream branch used for version lookups."""
        if self.beta_channel:
            return 'dev'
        return 'master'

    @property
    def beta_channel(self):
        """Return True if we run in beta upstream."""
        return self._data[ATTR_BETA_CHANNEL]

    @beta_channel.setter
    def beta_channel(self, value):
        """Set beta upstream mode and persist it immediately."""
        self._data[ATTR_BETA_CHANNEL] = bool(value)
        self.save()

    @AsyncThrottle(timedelta(seconds=60))
    async def fetch_data(self):
        """Fetch current versions from github.

        Is a coroutine. The throttle decorator skips real fetches done
        within 60 seconds of the previous one.
        """
        url = URL_HASSIO_VERSION.format(self.upstream)
        try:
            _LOGGER.info("Fetch update data from %s", url)
            with async_timeout.timeout(10, loop=self.loop):
                async with self.websession.get(url) as request:
                    # content_type=None accepts any mimetype from the server
                    data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
            return

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
            return

        # data valid?
        if not data:
            _LOGGER.warning("Invalid data from %s", url)
            return

        # update versions and persist them
        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
        self._data[ATTR_HASSIO] = data.get('hassio')
        self.save()
| @@ -1,92 +0,0 @@ | |||||||
| """Validate functions.""" |  | ||||||
| import voluptuous as vol |  | ||||||
|  |  | ||||||
| import pytz |  | ||||||
|  |  | ||||||
| from .const import ( |  | ||||||
|     ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD, |  | ||||||
|     ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE, |  | ||||||
|     ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT, |  | ||||||
|     ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL, |  | ||||||
|     ATTR_PORT, ATTR_WATCHDOG) |  | ||||||
|  |  | ||||||
|  |  | ||||||
# Shared voluptuous validators used by the schemas below.
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))  # valid TCP/UDP port
HASS_DEVICES = [vol.Match(r"^[^/]*$")]  # device names may not contain '/'
ALSA_CHANNEL = vol.Match(r"\d+,\d+")  # ALSA "card,device" number pair
|  |  | ||||||
|  |  | ||||||
def validate_timezone(timezone):
    """Validate voluptuous timezone.

    Returns the name unchanged when pytz recognises it, otherwise raises
    ``vol.Invalid`` (suppressing the pytz traceback context).
    """
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
        ) from None

    return timezone
|  |  | ||||||
|  |  | ||||||
def convert_to_docker_ports(data):
    """Convert data into docker port list.

    Accepted forms:
      * None        -> dynamic port (returns None)
      * int         -> single validated port
      * [ip, port]  -> 2-element list becomes an (ip, port) tuple
      * [p1, p2...] -> longer lists validate every entry as a port
    Anything else raises ``vol.Invalid``.
    """
    # dynamic ports
    if data is None:
        return None

    # single port
    if isinstance(data, int):
        return NETWORK_PORT(data)

    if isinstance(data, list):
        # ip port mapping -- NOTE(review): a plain list of exactly two
        # ports is indistinguishable from an (ip, port) pair here.
        if len(data) == 2:
            return (vol.Coerce(str)(data[0]), NETWORK_PORT(data[1]))
        # port list
        if len(data) > 2:
            return vol.Schema([NETWORK_PORT])(data)

    raise vol.Invalid("Can't validate docker host settings")
|  |  | ||||||
|  |  | ||||||
# Mapping of docker port spec ("8080", "53/udp", ...) to its converted form.
DOCKER_PORTS = vol.Schema({
    vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")):
        convert_to_docker_ports,
})


# Schema for the Home Assistant core configuration file.
# pylint: disable=no-value-for-parameter
SCHEMA_HASS_CONFIG = vol.Schema({
    vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
    vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
    # Image and pinned version must be given together (custom image setup).
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
    vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)


# Schema for the updater state file (channel + last known versions).
# pylint: disable=no-value-for-parameter
SCHEMA_UPDATER_CONFIG = vol.Schema({
    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
}, extra=vol.REMOVE_EXTRA)


# Schema for the main hassio configuration file.
# pylint: disable=no-value-for-parameter
SCHEMA_HASSIO_CONFIG = vol.Schema({
    vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
    vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
    vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
    vol.Optional(ATTR_TOTP): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    # Active API sessions: token -> expiry (both stored as strings).
    vol.Optional(ATTR_SESSIONS, default={}):
        vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
}, extra=vol.REMOVE_EXTRA)
 Submodule home-assistant-polymer updated: 9b9cba86c2...ebb20abee0
									
								
							
							
								
								
									
										
											BIN
										
									
								
								misc/hassio.png
									
									
									
									
									
								
							
							
						
						
									
										
											BIN
										
									
								
								misc/hassio.png
									
									
									
									
									
								
							
										
											Binary file not shown.
										
									
								
							| Before Width: | Height: | Size: 42 KiB | 
| @@ -1 +0,0 @@ | |||||||
| <mxfile userAgent="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36" version="6.5.6" editor="www.draw.io" type="device"><diagram name="Page-1">5Vptc6M2EP41/ng3gHj9mPiSy820c5n6Q3sfsVBsNTJyhYid/voKkABZkOBY+KYtmYnR6pVn99ld1l6A5e74laX77a80Q2ThOdlxAb4sPC8OY/G/Erw2At9xG8GG4awR9QQr/DeSQkdKS5yhQhvIKSUc73UhpHmOINdkKWP0oA97okTfdZ9ukCFYwZSY0t9xxrdS6oZJ1/GA8GYrt469sOlYp/B5w2iZy/0WHniqr6Z7l6q15IMW2zSjh54I3C3AklHKm7vdcYlIBa2CrZl3P9LbnpuhnE+Z4DUTXlJSInXikIipt09UrCAOyF8lKOFfJVUdn4paZTdigNjtKD5ERw206DtIYKrenLJdSrrJ4m5TfX5fqX3E2Zqtmg4JS7urd9hijlb7FFbtg7A2MWjLd0S03Oo0mJAlJZTVowXYKIRQyAvO6DPq9Tj1Jc+/kutLvF4Q4+g4CqHbKkbYO6I7xNmrGKImJKCZIm09SKRuD53l+Arobc9oQjkulca6aZfuFCZupM6G9QcM/X3LcaW31WvB0e5CNGGG1vF6CE0QggRkrb7sAhhNBNCzAKBvAPiFwmfELkUOokCQ/trI+SZy3hBywAJyoYHcw9JArXaFqJpRUe9MLscQDXN5HQd+4NjB0A8DHcPQxDBwTAgDCxAmBl4oE3FINinjW7qheUruOumtjmgPPXTE/I9K/DkKZPOH6srFwZq+QDV/yBX+RJy/ygiclpwKUbfxL5Tu5RrNUavzvQ20eBxaMihHRTJ4p2yDeM9uTHUwRFKOX/TVLwFX5RK20fXeQDcB3im+deMRMSweALGfBbp/JdCj0Xxi3UX48xIMN6wSjNMEYlXuEXvBhXAJagOm+h7Sovj2fTTBaMXr0aSjMwP3fbdluKflMgybVEN3aFmA4sy347ZAoLstMJB1uPGA33JtRE3Xm4Nbbo9Yyou13NJ4VbuxeUnkqveOHouiK7EIzOO6NHh1dE/iQtc89VyFwIPfVK9YQgCJYBqGSnyPidpzqm5QnpmLCWFvqcFMfrm0qlgvvlZQUm8cvaxJrPLpRjy6wLByU9dxRSmKn6CtLFR3Rd5A/t56HS1/9224ovDKXHE/O3qQ/+zG8aWBfiKtPmjxwLR4d0Sn1i3enyVUSJ30srCJCPYcTk5zpHmb8xQ2Vl+AJXtp+WpPYdeKPa5ZUrjJMpoXhhqLbbqvbveMQlQU73sn3ZVN9lX34qr9fZMTCt07XhiBxANhEHtx7PhgpqRqyJN5bmB6ssSCI1O1nDmJ0rVOHdWlqYAkU59uc7zoXEAAOfWR4vq9Q5WqneE0Wq3Q0FJO6hdSz1ynobKxTm0U7dNMs5PYJCjk1KxYKX6WO9IMALcVOzAUyKdrRB5pgTmmuRiyppzTnRhAqo7btoitVVbrMna3xg3Bm2oup+fRvCvEnpZu5QYWiHxS0wEDNR0wkJBYqciaNJ5AUifSWOq/x1LX5OgUOk5Ity8PgO97LQshEng/L0SqvXsMPBwOpvcmBO+LWg2SiZDQMrs4Tl6FQInuz3xnIKeP5iovgLcLo9K4P5DEn8mRmTLEXqzt3hyaQ3qj0faDNPFNmjTmaz+S+icmc+pN7YVAMP6tjfNQrkcjIUzZ5fQL62uAfkH1Z4d+CThJJ4boN1TdsxLBopnY17f7yGaWOT9lP8i+YAb2TVZjYJDkK+bbuekxFp2QmwUomocevnppvQo94v9LcEpCnaOR5dgU/idjk/m9+G9oX71qUYbReBXl30s+Vf6dgXyi2f0WqlFG93szcPcP</diagram></mxfile
> |  | ||||||
										
											Binary file not shown.
										
									
								
							| Before Width: | Height: | Size: 36 KiB | 
| @@ -1 +0,0 @@ | |||||||
| <mxfile userAgent="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0" version="6.5.8" editor="www.draw.io" type="device"><diagram name="Page-1">5Vxdd5s4EP01fmwOkgCbx9hp2j7sNrvpnnYfiVFsTjDyghwn++tXGMmAxileEB9O+9BjBhjM3GHmzjXKhCw2L58Sf7v+jQU0mmAreJmQmwnGU4eI/zPDa24gyM0NqyQMchMqDPfhv1QaLWndhQFNKwdyxiIebqvGJYtjuuQVm58kbF897JFF1atu/RUFhvulH0Hr9zDg69w6w25h/0zD1VpdGblevufBXz6tEraL5fUmmDwe/uW7N77yJW80XfsB25dM5OOELBLGeP5p87KgURZaFbb8vNs39h6/d0Jjfs4JOD/h2Y928tZvwyTlwnTP/YTLL8lfVWA4fRF+52u+iYQBiY8pT9gTXbCIJcISs1gcOX8Mo0gz+VG4isXmUnwzKuzzZ5rwUIT8Wu7YhEGQXWa+X4ec3m/9ZXbNvcivzCGL+b38Go7aztMGeWIb3rcMRXYV+lIyyTh8omxDefIqDpF7ySw/Q6asKxHaF/gjS9rWJewVkr5MudXRcRF28UFG/jQKBKDwVypipAe/FPUtC2N+uKIznzg3mYUmobhwFtoblvA1W7HYj+4KawcxQhgGyT0Vo5mBINkgSJ/9NB1hkDAiw0XJAVFaiyhdffk6wkDZ7oCBckGg2JbGh1uKs2b2drT0wvXAOGcbsYPGwXXWfDJbxJZPP4uSqK4ryiuZTYNKU4JhK4VFRSChkc/D52rbOhUW6e0uQ7pAwNOeZ1sLbMp2yZLKk8ptRPMjoNMc4aqj/HaBowNIxzs8C7cpwE2ckdLlLgm5uNPbMH5kvaLnDIYenmrPj9sQPuLUODIH3wzCNxVxFtdz/9llrGcexiEvtibkOiNwfpTS7KjpTVtsD085mQd+uqaBPE/slmRilm29hPyH+PzBurIcuf232LauCFH7S5XwxvpZpuQQVDKlyaPfMlNsy60AjK2mmYJrHJnLFA9kip8+ZfsP+WHdfe8+E856/kk/EOqsApOGECJS48gchGqcK2GYUm4Sw8vss7hpoT5GVDlyvM6wg6NhtdGyLQ9ZLAi4G2WF+kHMK+7qULK1gr4VBHTPkkAv6nrJt7b70iFGir1Kj/K4iC6vsWPPUGMHjgzmCxxiq/mS0jQVCfNGvvyvZOk1VxQdQFcWmlbowNRtRQfsMacc0XWNpikHHL2RcgIG/7V0mJxJWyYlFA306lSk5Rv5Jg94oq+mM66egDSqW31xSm16J9OmGTOrcWSwSEF5xMi43xGSA1FL0rTd6NQSODKIJNRvfmfJxodQvmPJGlfZoN2nZo2gEHMZorWDYJQ6UxkR1DsuRLXuN0xw2L8c2brXSGE4Ug+mW6vkHn6gdpqKIbpw7RDcVcc6JtpolGv11I1g3HAcQ+MGcGQQwBOKyBnaNU/E0XhROY4zvn2fGrfKqUZ1wrDK7TSWTXCNI4NJBWWTXOYejb6tiF7fU4jbVIHQpxDgyCB6UF/IZ4Xete3x9GK3aSnXxW3X7kzcPvHrfzdi5SAypVuVKV3itqros1EzhykyxByAoz6FylOvNbx7obI3XqANbNPG70nMahwZrFBQOBizUjkUSZjqM3VTkgAcGYQSihuXoZR5fQobBAobF6KU9RsmqCJcjlLWb6TguD6YUqaSe3h27plSyrzulDJS9ypB70qZeupGwHc9U0oZcGQQwPqf3dsoZflxFy6UkTZlwrBQ5pkSyoAjgzkFf7ovhLLbb1+/3XWfDGfVCnzubGyYCiPLlGAGPRmEESovZcXMCJAX2pqRZUo5Q1Z30hmpW4DRjXSWdYVDLzgcNcu64gVqaSrZRsotEDIlpkFPfapppH6VyftT03ojD/qqvebLjm
Z1ngyWLSjCjFlPG4xEIFOCGvRkDky1TPHEy3+iSooiia2TPOLXeRVw5kqeVWoauKtXAW2oSY1U4LQ1noQ9G4SpuwXsGIRptAqnM2ScoPwzZolz0FBBouMvRTvwOT3WQJ2GywJZEHAzHLrgzIpB54wZ2a0Ys32iOaoHaQDGfHyd+rjQXWld7ZfMqwbaQb+E5Kc6s0mVzeDANsR6LNIy1fCJVDt3CUYXw5lWWWyvYaoRp85Tn8OZA8nbH39+WLCAts2YrtZTnVtuWg9Wem1pysXJTAPcsc8DvAmckPyNHM5z9ZbWo5UOgtvw+UWkzpNBOCFJ/ZKvzv7lJiqtPx8LV3l1lXpNp+VIJTaLv/mWo1b8XT3y8T8=</diagram></mxfile> |  | ||||||
							
								
								
									
										27
									
								
								pylintrc
									
									
									
									
									
								
							
							
						
						
									
										27
									
								
								pylintrc
									
									
									
									
									
								
							| @@ -15,24 +15,37 @@ reports=no | |||||||
| # abstract-method - with intro of async there are always methods missing | # abstract-method - with intro of async there are always methods missing | ||||||
|  |  | ||||||
| disable= | disable= | ||||||
|   locally-disabled, |  | ||||||
|   duplicate-code, |  | ||||||
|   cyclic-import, |  | ||||||
|   abstract-class-little-used, |   abstract-class-little-used, | ||||||
|   abstract-class-not-used, |   abstract-class-not-used, | ||||||
|   unused-argument, |   abstract-method, | ||||||
|  |   cyclic-import, | ||||||
|  |   duplicate-code, | ||||||
|   global-statement, |   global-statement, | ||||||
|  |   locally-disabled, | ||||||
|  |   not-context-manager, | ||||||
|   redefined-variable-type, |   redefined-variable-type, | ||||||
|  |   too-few-public-methods, | ||||||
|   too-many-arguments, |   too-many-arguments, | ||||||
|   too-many-branches, |   too-many-branches, | ||||||
|   too-many-instance-attributes, |   too-many-instance-attributes, | ||||||
|  |   too-many-lines, | ||||||
|   too-many-locals, |   too-many-locals, | ||||||
|   too-many-public-methods, |   too-many-public-methods, | ||||||
|   too-many-return-statements, |   too-many-return-statements, | ||||||
|   too-many-statements, |   too-many-statements, | ||||||
|   too-many-lines, |   unused-argument, | ||||||
|  |   line-too-long, | ||||||
|  |   bad-continuation, | ||||||
|   too-few-public-methods, |   too-few-public-methods, | ||||||
|   abstract-method |   no-self-use, | ||||||
|  |   not-async-context-manager, | ||||||
|  |   too-many-locals, | ||||||
|  |   too-many-branches, | ||||||
|  |   no-else-return | ||||||
|  |  | ||||||
| [EXCEPTIONS] | [EXCEPTIONS] | ||||||
| overgeneral-exceptions=Exception,HomeAssistantError | overgeneral-exceptions=Exception | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [TYPECHECK] | ||||||
|  | ignored-modules = distutils | ||||||
|   | |||||||
							
								
								
									
										18
									
								
								requirements.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								requirements.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,18 @@ | |||||||
|  | aiohttp==3.6.1 | ||||||
|  | async_timeout==3.0.1 | ||||||
|  | attrs==19.3.0 | ||||||
|  | cchardet==2.1.6 | ||||||
|  | colorlog==4.1.0 | ||||||
|  | cpe==1.2.1 | ||||||
|  | cryptography==2.9 | ||||||
|  | docker==4.2.0 | ||||||
|  | gitpython==3.1.1 | ||||||
|  | jinja2==2.11.2 | ||||||
|  | packaging==20.3 | ||||||
|  | ptvsd==4.3.2 | ||||||
|  | pulsectl==20.2.4 | ||||||
|  | pytz==2019.3 | ||||||
|  | pyudev==0.22.0 | ||||||
|  | ruamel.yaml==0.15.100 | ||||||
|  | uvloop==0.14.0 | ||||||
|  | voluptuous==0.11.7 | ||||||
							
								
								
									
										6
									
								
								requirements_tests.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								requirements_tests.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | flake8==3.7.9 | ||||||
|  | pylint==2.4.4 | ||||||
|  | pytest==5.4.1 | ||||||
|  | pytest-timeout==1.3.4 | ||||||
|  | pytest-aiohttp==0.3.0 | ||||||
|  | black==19.10b0 | ||||||
							
								
								
									
										9
									
								
								rootfs/etc/cont-init.d/udev.sh
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								rootfs/etc/cont-init.d/udev.sh
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | |||||||
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start udev service
# ==============================================================================
# Run the udev daemon in the background so device events are processed.
udevd --daemon

# Fix: log message grammar ("informations" -> "information").
bashio::log.info "Update udev information"
# Replay kernel device events and wait until the event queue is drained,
# so subsequent services see a fully populated /dev.
udevadm trigger
udevadm settle
							
								
								
									
										35
									
								
								rootfs/etc/pulse/client.conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								rootfs/etc/pulse/client.conf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | |||||||
|  | # This file is part of PulseAudio. | ||||||
|  | # | ||||||
|  | # PulseAudio is free software; you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Lesser General Public License as published by | ||||||
|  | # the Free Software Foundation; either version 2 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | # | ||||||
|  | # PulseAudio is distributed in the hope that it will be useful, but | ||||||
|  | # WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||||||
|  | # General Public License for more details. | ||||||
|  | # | ||||||
|  | # You should have received a copy of the GNU Lesser General Public License | ||||||
|  | # along with PulseAudio; if not, see <http://www.gnu.org/licenses/>. | ||||||
|  |  | ||||||
|  | ## Configuration file for PulseAudio clients. See pulse-client.conf(5) for | ||||||
|  | ## more information. Default values are commented out.  Use either ; or # for | ||||||
|  | ## commenting. | ||||||
|  |  | ||||||
|  | ; default-sink = | ||||||
|  | ; default-source = | ||||||
|  | default-server = unix://data/audio/external/pulse.sock | ||||||
|  | ; default-dbus-server = | ||||||
|  |  | ||||||
|  | autospawn = no | ||||||
|  | ; daemon-binary = /usr/bin/pulseaudio | ||||||
|  | ; extra-arguments = --log-target=syslog | ||||||
|  |  | ||||||
|  | ; cookie-file = | ||||||
|  |  | ||||||
|  | ; enable-shm = yes | ||||||
|  | ; shm-size-bytes = 0 # setting this 0 will use the system-default, usually 64 MiB | ||||||
|  |  | ||||||
|  | ; auto-connect-localhost = no | ||||||
|  | ; auto-connect-display = no | ||||||
							
								
								
									
										5
									
								
								rootfs/etc/services.d/supervisor/finish
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								rootfs/etc/services.d/supervisor/finish
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | |||||||
#!/usr/bin/execlineb -S0
# ==============================================================================
# Take down the S6 supervision tree when Supervisor fails
# ==============================================================================
# -t asks the s6 scanner to terminate, which stops the whole container.
s6-svscanctl -t /var/run/s6/services
							
								
								
									
										7
									
								
								rootfs/etc/services.d/supervisor/run
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								rootfs/etc/services.d/supervisor/run
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,7 @@ | |||||||
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Supervisor service
# ==============================================================================
# Preload jemalloc to reduce memory fragmentation in the long-running process.
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"

# exec so the Python process replaces this shell and receives s6 signals.
exec python3 -m supervisor
							
								
								
									
										133
									
								
								scripts/test_env.sh
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										133
									
								
								scripts/test_env.sh
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,133 @@ | |||||||
#!/bin/bash
# Build the Supervisor image and run it against an in-container dockerd,
# for local/CI integration testing. Exits non-zero on any failure (-eE
# so the ERR trap fires inside functions too).
set -eE

DOCKER_TIMEOUT=30  # seconds to wait for dockerd start/stop
DOCKER_PID=0       # pid of the background dockerd we launched


# Start a docker daemon in the background and wait (up to DOCKER_TIMEOUT
# seconds) until `docker info` answers.
function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}


# Terminate the background dockerd (if still alive) and wait for it to die,
# again bounded by DOCKER_TIMEOUT.
function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}


# Build the amd64 Supervisor test image with the official builder container.
function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm --privileged \
        -v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
            --generic dev -t /data --test --amd64 --no-cache
}


# Strip the last_boot marker from a previous run's config so the Supervisor
# performs a fresh boot sequence.
function cleanup_lastboot() {
    if [[ -f /workspaces/test_supervisor/config.json ]]; then
        echo "Cleaning up last boot"
        cp /workspaces/test_supervisor/config.json /tmp/config.json
        jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
        rm /tmp/config.json
    fi
}


# Remove leftover containers from earlier runs; best-effort (|| true).
function cleanup_docker() {
    echo "Cleaning up stopped containers..."
    docker rm $(docker ps -a -q) || true
}


# Run the freshly built Supervisor image in the foreground with the mounts
# and environment it expects on a real host.
function setup_test_env() {
    mkdir -p /workspaces/test_supervisor

    echo "Start Supervisor"
    docker run --rm --privileged \
        --name hassio_supervisor \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock \
        -v /run/dbus:/run/dbus \
        -v "/workspaces/test_supervisor":/data \
        -v /etc/machine-id:/etc/machine-id:ro \
        -e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e SUPERVISOR_MACHINE="qemux86-64" \
        homeassistant/amd64-hassio-supervisor:latest

}


# Start a system dbus daemon (Supervisor talks to the host over dbus);
# no-op when one is already running.
function init_dbus() {
    if pgrep dbus-daemon; then
        echo "Dbus is running"
        return 0
    fi

    echo "Startup dbus"
    mkdir -p /var/lib/dbus
    cp -f /etc/machine-id /var/lib/dbus/machine-id

    # cleanups
    mkdir -p /run/dbus
    rm -f /run/dbus/pid

    # run
    dbus-daemon --system --print-address
}

echo "Start Test-Env"

start_docker
# Make sure dockerd is shut down even if a later step fails.
trap "stop_docker" ERR

build_supervisor
cleanup_lastboot
cleanup_docker
init_dbus
setup_test_env
stop_docker
							
								
								
									
										18
									
								
								scripts/update-frontend.sh
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										18
									
								
								scripts/update-frontend.sh
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,18 @@ | |||||||
#!/bin/bash
# Rebuild the Home Assistant frontend from the submodule and copy the
# result into the Supervisor's bundled panel directory.
set -e

# Update frontend submodule to the latest remote commit.
git submodule update --init --recursive --remote

# Use the Node version pinned by the frontend repo via nvm.
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
cd home-assistant-polymer
nvm install
script/bootstrap

# build frontend
cd hassio
./script/build_hassio

# Copy frontend.
# Fix: stale chunks were removed from ../../supervisor/hassio/api/panel,
# but the build is copied to ../../supervisor/api/panel -- clean the
# actual destination so old chunk files don't accumulate.
rm -f ../../supervisor/api/panel/chunk.*
cp -rf build/* ../../supervisor/api/panel/
							
								
								
									
										17
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										17
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,17 @@ | |||||||
|  | [isort] | ||||||
|  | multi_line_output = 3 | ||||||
|  | include_trailing_comma=True | ||||||
|  | force_grid_wrap=0 | ||||||
|  | line_length=88 | ||||||
|  | indent = "    " | ||||||
|  | not_skip = __init__.py | ||||||
|  | force_sort_within_sections = true | ||||||
|  | sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER | ||||||
|  | default_section = THIRDPARTY | ||||||
|  | forced_separate = tests | ||||||
|  | combine_as_imports = true | ||||||
|  | use_parentheses = true | ||||||
|  |  | ||||||
|  | [flake8] | ||||||
|  | max-line-length = 88 | ||||||
|  | ignore = E501, W503 | ||||||
							
								
								
									
										74
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										74
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,52 +1,44 @@ | |||||||
|  | """Home Assistant Supervisor setup.""" | ||||||
| from setuptools import setup | from setuptools import setup | ||||||
|  |  | ||||||
| from hassio.const import HASSIO_VERSION | from supervisor.const import SUPERVISOR_VERSION | ||||||
|  |  | ||||||
|  |  | ||||||
| setup( | setup( | ||||||
|     name='HassIO', |     name="Supervisor", | ||||||
|     version=HASSIO_VERSION, |     version=SUPERVISOR_VERSION, | ||||||
|     license='BSD License', |     license="BSD License", | ||||||
|     author='The Home Assistant Authors', |     author="The Home Assistant Authors", | ||||||
|     author_email='hello@home-assistant.io', |     author_email="hello@home-assistant.io", | ||||||
|     url='https://home-assistant.io/', |     url="https://home-assistant.io/", | ||||||
|     description=('Open-source private cloud os for Home-Assistant' |     description=("Open-source private cloud os for Home-Assistant" " based on HassOS"), | ||||||
|                  ' based on ResinOS'), |     long_description=( | ||||||
|     long_description=('A maintainless private cloud operator system that' |         "A maintainless private cloud operator system that" | ||||||
|                       'setup a Home-Assistant instance. Based on ResinOS'), |         "setup a Home-Assistant instance. Based on HassOS" | ||||||
|  |     ), | ||||||
|     classifiers=[ |     classifiers=[ | ||||||
|         'Intended Audience :: End Users/Desktop', |         "Intended Audience :: End Users/Desktop", | ||||||
|         'Intended Audience :: Developers', |         "Intended Audience :: Developers", | ||||||
|         'License :: OSI Approved :: Apache Software License', |         "License :: OSI Approved :: Apache Software License", | ||||||
|         'Operating System :: OS Independent', |         "Operating System :: OS Independent", | ||||||
|         'Topic :: Home Automation' |         "Topic :: Home Automation", | ||||||
|         'Topic :: Software Development :: Libraries :: Python Modules', |         "Topic :: Software Development :: Libraries :: Python Modules", | ||||||
|         'Topic :: Scientific/Engineering :: Atmospheric Science', |         "Topic :: Scientific/Engineering :: Atmospheric Science", | ||||||
|         'Development Status :: 5 - Production/Stable', |         "Development Status :: 5 - Production/Stable", | ||||||
|         'Intended Audience :: Developers', |         "Intended Audience :: Developers", | ||||||
|         'Programming Language :: Python :: 3.6', |         "Programming Language :: Python :: 3.7", | ||||||
|     ], |     ], | ||||||
|     keywords=['docker', 'home-assistant', 'api'], |     keywords=["docker", "home-assistant", "api"], | ||||||
|     zip_safe=False, |     zip_safe=False, | ||||||
|     platforms='any', |     platforms="any", | ||||||
|     packages=[ |     packages=[ | ||||||
|         'hassio', |         "supervisor", | ||||||
|         'hassio.dock', |         "supervisor.docker", | ||||||
|         'hassio.api', |         "supervisor.addons", | ||||||
|         'hassio.addons', |         "supervisor.api", | ||||||
|         'hassio.snapshots' |         "supervisor.misc", | ||||||
|  |         "supervisor.utils", | ||||||
|  |         "supervisor.plugins", | ||||||
|  |         "supervisor.snapshots", | ||||||
|     ], |     ], | ||||||
|     include_package_data=True, |     include_package_data=True, | ||||||
|     install_requires=[ |  | ||||||
|         'async_timeout', |  | ||||||
|         'aiohttp', |  | ||||||
|         'docker', |  | ||||||
|         'colorlog', |  | ||||||
|         'voluptuous', |  | ||||||
|         'gitpython', |  | ||||||
|         'pyotp', |  | ||||||
|         'pyqrcode', |  | ||||||
|         'pytz', |  | ||||||
|         'pyudev' |  | ||||||
|     ] |  | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										1
									
								
								supervisor/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								supervisor/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | |||||||
|  | """Init file for Supervisor.""" | ||||||
							
								
								
									
										62
									
								
								supervisor/__main__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										62
									
								
								supervisor/__main__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,62 @@ | |||||||
|  | """Main file for Supervisor.""" | ||||||
|  | import asyncio | ||||||
|  | from concurrent.futures import ThreadPoolExecutor | ||||||
|  | import logging | ||||||
|  | import sys | ||||||
|  |  | ||||||
|  | from supervisor import bootstrap | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  |  | ||||||
def initialize_event_loop():
    """Attempt to use uvloop.

    Falls back silently to the stock asyncio event loop when uvloop
    is not installed, and returns the resulting event loop.
    """
    try:
        # pylint: disable=import-outside-toplevel
        import uvloop
    except ImportError:
        # uvloop is optional — keep the default asyncio policy.
        pass
    else:
        uvloop.install()

    return asyncio.get_event_loop()
|  |  | ||||||
|  |  | ||||||
# pylint: disable=invalid-name
if __name__ == "__main__":
    # Logging must come first so every later bootstrap step is captured.
    bootstrap.initialize_logging()

    # Init async event loop
    loop = initialize_event_loop()

    # Check if all information are available to setup Supervisor
    bootstrap.check_environment()

    # init executor pool
    # Named threads make blocking sync work identifiable in debug output.
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Supervisor setup")
    # Build the central CoreSys object, then connect to Docker/host
    # before any component setup runs.
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    loop.run_until_complete(coresys.core.connect())

    bootstrap.supervisor_debugger(coresys)
    bootstrap.migrate_system_env(coresys)

    _LOGGER.info("Setup Supervisor")
    loop.run_until_complete(coresys.core.setup())

    # Schedule the long-running core start and the POSIX signal handlers
    # from inside the loop, then hand control to run_forever().
    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

    try:
        _LOGGER.info("Run Supervisor")
        loop.run_forever()
    finally:
        # Always attempt an orderly core stop before tearing the loop down,
        # even when run_forever() exits via an exception.
        _LOGGER.info("Stopping Supervisor")
        loop.run_until_complete(coresys.core.stop())
        executor.shutdown(wait=False)
        loop.close()

    _LOGGER.info("Close Supervisor")
    sys.exit(0)
							
								
								
									
										334
									
								
								supervisor/addons/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										334
									
								
								supervisor/addons/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,334 @@ | |||||||
|  | """Init file for Supervisor add-ons.""" | ||||||
|  | import asyncio | ||||||
|  | from contextlib import suppress | ||||||
|  | import logging | ||||||
|  | import tarfile | ||||||
|  | from typing import Dict, List, Optional, Union | ||||||
|  |  | ||||||
|  | from ..const import BOOT_AUTO, STATE_STARTED | ||||||
|  | from ..coresys import CoreSys, CoreSysAttributes | ||||||
|  | from ..exceptions import ( | ||||||
|  |     AddonsError, | ||||||
|  |     AddonsNotSupportedError, | ||||||
|  |     CoreDNSError, | ||||||
|  |     DockerAPIError, | ||||||
|  |     HomeAssistantAPIError, | ||||||
|  |     HostAppArmorError, | ||||||
|  | ) | ||||||
|  | from ..store.addon import AddonStore | ||||||
|  | from .addon import Addon | ||||||
|  | from .data import AddonsData | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | AnyAddon = Union[Addon, AddonStore] | ||||||
|  |  | ||||||
|  |  | ||||||
class AddonManager(CoreSysAttributes):
    """Manage add-ons inside Supervisor.

    Tracks two maps keyed by slug: `local` (installed add-ons) and
    `store` (add-ons available from repositories), and drives the
    install / uninstall / update / rebuild / restore lifecycle.
    """

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self.data: AddonsData = AddonsData(coresys)
        # Installed add-ons, keyed by slug.
        self.local: Dict[str, Addon] = {}
        # Store/repository add-ons, keyed by slug.
        self.store: Dict[str, AddonStore] = {}

    @property
    def all(self) -> List[AnyAddon]:
        """Return a list of all add-ons."""
        # Merge order makes a local (installed) entry shadow the store
        # entry with the same slug.
        addons = {**self.store, **self.local}
        return list(addons.values())

    @property
    def installed(self) -> List[Addon]:
        """Return a list of all installed add-ons."""
        return list(self.local.values())

    def get(self, addon_slug: str) -> Optional[AnyAddon]:
        """Return an add-on from slug.

        Prio:
          1 - Local
          2 - Store
        """
        if addon_slug in self.local:
            return self.local[addon_slug]
        return self.store.get(addon_slug)

    def from_token(self, token: str) -> Optional[Addon]:
        """Return an add-on from Supervisor token.

        Returns None when no installed add-on owns the token.
        """
        for addon in self.installed:
            if token == addon.supervisor_token:
                return addon
        return None

    async def load(self) -> None:
        """Start up add-on management.

        Creates an Addon wrapper for every persisted add-on and attaches
        each to its Docker instance concurrently.
        """
        tasks = []
        for slug in self.data.system:
            addon = self.local[slug] = Addon(self.coresys, slug)
            tasks.append(addon.load())

        # Run initial tasks
        _LOGGER.info("Found %d installed add-ons", len(tasks))
        if tasks:
            # NOTE(review): passing coroutines directly to asyncio.wait is
            # deprecated since Python 3.8 — confirm the target runtime.
            await asyncio.wait(tasks)

        # Sync DNS
        await self.sync_dns()

    async def boot(self, stage: str) -> None:
        """Boot add-ons with mode auto.

        Only add-ons whose boot mode is BOOT_AUTO and whose startup
        stage matches are started.
        """
        tasks = []
        for addon in self.installed:
            if addon.boot != BOOT_AUTO or addon.startup != stage:
                continue
            tasks.append(addon.start())

        _LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)
            # Give started add-ons time to settle before the next stage.
            await asyncio.sleep(self.sys_config.wait_boot)

    async def shutdown(self, stage: str) -> None:
        """Shutdown addons.

        Stops all running add-ons belonging to the given startup stage.
        """
        tasks = []
        for addon in self.installed:
            if await addon.state() != STATE_STARTED or addon.startup != stage:
                continue
            tasks.append(addon.stop())

        _LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)

    async def install(self, slug: str) -> None:
        """Install an add-on.

        Raises AddonsError when the slug is unknown or the Docker install
        fails, AddonsNotSupportedError when the platform does not match.
        """
        if slug in self.local:
            _LOGGER.warning("Add-on %s is already installed", slug)
            return
        store = self.store.get(slug)

        if not store:
            _LOGGER.error("Add-on %s not exists", slug)
            raise AddonsError()

        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()

        # Persist the add-on data before touching Docker so the wrapper
        # object below can resolve its configuration.
        self.data.install(store)
        addon = Addon(self.coresys, slug)

        if not addon.path_data.is_dir():
            _LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
            addon.path_data.mkdir()

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        try:
            await addon.instance.install(store.version, store.image)
        except DockerAPIError:
            # Roll back the persisted data entry on Docker failure.
            self.data.uninstall(addon)
            raise AddonsError() from None
        else:
            self.local[slug] = addon
            _LOGGER.info("Add-on '%s' successfully installed", slug)

    async def uninstall(self, slug: str) -> None:
        """Remove an add-on.

        Removes the container, wipes add-on data and cleans up every
        subsystem that references the add-on (audio, AppArmor, ingress,
        discovery, services).
        """
        if slug not in self.local:
            _LOGGER.warning("Add-on %s is not installed", slug)
            return
        addon = self.local.get(slug)

        try:
            await addon.instance.remove()
        except DockerAPIError:
            raise AddonsError() from None

        await addon.remove_data()

        # Cleanup audio settings
        if addon.path_pulse.exists():
            with suppress(OSError):
                addon.path_pulse.unlink()

        # Cleanup AppArmor profile
        with suppress(HostAppArmorError):
            await addon.uninstall_apparmor()

        # Cleanup Ingress panel from sidebar
        if addon.ingress_panel:
            addon.ingress_panel = False
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

        # Cleanup discovery data
        for message in self.sys_discovery.list_messages:
            if message.addon != addon.slug:
                continue
            self.sys_discovery.remove(message)

        # Cleanup services data
        for service in self.sys_services.list_services:
            if addon.slug not in service.active:
                continue
            service.del_service_data(addon)

        self.data.uninstall(addon)
        self.local.pop(slug)

        _LOGGER.info("Add-on '%s' successfully removed", slug)

    async def update(self, slug: str) -> None:
        """Update add-on.

        No-op when already up to date; restores the previous run state
        after a successful update.
        """
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        # Safe: the detached check above guarantees a store entry exists.
        store = self.store.get(slug)

        if addon.version == store.version:
            _LOGGER.warning("No update available for add-on %s", slug)
            return

        # Check if available, Maybe something have changed
        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()

        # Update instance
        # Remember the run state so it can be restored afterwards.
        last_state = await addon.state()
        try:
            await addon.instance.update(store.version, store.image)

            # Cleanup
            with suppress(DockerAPIError):
                await addon.instance.cleanup()
        except DockerAPIError:
            raise AddonsError() from None
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully updated", slug)

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        # restore state
        if last_state == STATE_STARTED:
            await addon.start()

    async def rebuild(self, slug: str) -> None:
        """Perform a rebuild of local build add-on.

        Only valid for add-ons built locally (no upstream image) whose
        installed version matches the store version.
        """
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)

        # Check if a rebuild is possible now
        if addon.version != store.version:
            _LOGGER.error("Version changed, use Update instead Rebuild")
            raise AddonsError()
        if not addon.need_build:
            _LOGGER.error("Can't rebuild a image based add-on")
            raise AddonsNotSupportedError()

        # remove docker container but not addon config
        last_state = await addon.state()
        try:
            await addon.instance.remove()
            await addon.instance.install(addon.version)
        except DockerAPIError:
            raise AddonsError() from None
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully rebuilt", slug)

        # restore state
        if last_state == STATE_STARTED:
            await addon.start()

    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on.

        Works both for add-ons that are already installed and for ones
        that only exist in the snapshot tar.
        """
        if slug not in self.local:
            _LOGGER.debug("Add-on %s is not local available for restore", slug)
            addon = Addon(self.coresys, slug)
        else:
            _LOGGER.debug("Add-on %s is local available for restore", slug)
            addon = self.local[slug]

        await addon.restore(tar_file)

        # Check if new
        if slug not in self.local:
            _LOGGER.info("Detect new Add-on after restore %s", slug)
            self.local[slug] = addon

        # Update ingress
        if addon.with_ingress:
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

    async def repair(self) -> None:
        """Repair local add-ons.

        Re-pulls or rebuilds add-ons whose Docker image disappeared;
        add-ons that cannot be repaired are uninstalled.
        """
        needs_repair: List[Addon] = []

        # Evaluate Add-ons to repair
        for addon in self.installed:
            if await addon.instance.exists():
                continue
            needs_repair.append(addon)

        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
        if not needs_repair:
            return

        for addon in needs_repair:
            _LOGGER.info("Start repair for add-on: %s", addon.slug)
            await self.sys_run_in_executor(
                self.sys_docker.network.stale_cleanup, addon.instance.name
            )

            with suppress(DockerAPIError, KeyError):
                # Need pull a image again
                if not addon.need_build:
                    await addon.instance.install(addon.version, addon.image)
                    continue

                # Need local lookup
                if addon.need_build and not addon.is_detached:
                    store = self.store[addon.slug]
                    # If this add-on is available for rebuild
                    if addon.version == store.version:
                        await addon.instance.install(addon.version, addon.image)
                        continue

            # Fall through: every repair strategy failed for this add-on.
            _LOGGER.error("Can't repair %s", addon.slug)
            with suppress(AddonsError):
                await self.uninstall(addon.slug)

    async def sync_dns(self) -> None:
        """Sync add-ons DNS names."""
        # Update hosts
        for addon in self.installed:
            if not await addon.instance.is_running():
                continue
            # Batch the host entries; a single write happens below.
            self.sys_plugins.dns.add_host(
                ipv4=addon.ip_address, names=[addon.hostname], write=False
            )

        # Write hosts files
        with suppress(CoreDNSError):
            self.sys_plugins.dns.write_hosts()
							
								
								
									
										684
									
								
								supervisor/addons/addon.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										684
									
								
								supervisor/addons/addon.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,684 @@ | |||||||
|  | """Init file for Supervisor add-ons.""" | ||||||
|  | from contextlib import suppress | ||||||
|  | from copy import deepcopy | ||||||
|  | from ipaddress import IPv4Address | ||||||
|  | import logging | ||||||
|  | from pathlib import Path, PurePath | ||||||
|  | import re | ||||||
|  | import secrets | ||||||
|  | import shutil | ||||||
|  | import tarfile | ||||||
|  | from tempfile import TemporaryDirectory | ||||||
|  | from typing import Any, Awaitable, Dict, List, Optional | ||||||
|  |  | ||||||
|  | import voluptuous as vol | ||||||
|  | from voluptuous.humanize import humanize_error | ||||||
|  |  | ||||||
|  | from ..const import ( | ||||||
|  |     ATTR_ACCESS_TOKEN, | ||||||
|  |     ATTR_AUDIO_INPUT, | ||||||
|  |     ATTR_AUDIO_OUTPUT, | ||||||
|  |     ATTR_AUTO_UPDATE, | ||||||
|  |     ATTR_BOOT, | ||||||
|  |     ATTR_IMAGE, | ||||||
|  |     ATTR_INGRESS_ENTRY, | ||||||
|  |     ATTR_INGRESS_PANEL, | ||||||
|  |     ATTR_INGRESS_PORT, | ||||||
|  |     ATTR_INGRESS_TOKEN, | ||||||
|  |     ATTR_NETWORK, | ||||||
|  |     ATTR_OPTIONS, | ||||||
|  |     ATTR_PORTS, | ||||||
|  |     ATTR_PROTECTED, | ||||||
|  |     ATTR_SCHEMA, | ||||||
|  |     ATTR_STATE, | ||||||
|  |     ATTR_SYSTEM, | ||||||
|  |     ATTR_USER, | ||||||
|  |     ATTR_UUID, | ||||||
|  |     ATTR_VERSION, | ||||||
|  |     DNS_SUFFIX, | ||||||
|  |     STATE_STARTED, | ||||||
|  |     STATE_STOPPED, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSys | ||||||
|  | from ..docker.addon import DockerAddon | ||||||
|  | from ..docker.stats import DockerStats | ||||||
|  | from ..exceptions import ( | ||||||
|  |     AddonsError, | ||||||
|  |     AddonsNotSupportedError, | ||||||
|  |     DockerAPIError, | ||||||
|  |     HostAppArmorError, | ||||||
|  |     JsonFileError, | ||||||
|  | ) | ||||||
|  | from ..utils.apparmor import adjust_profile | ||||||
|  | from ..utils.json import read_json_file, write_json_file | ||||||
|  | from ..utils.tar import exclude_filter, secure_path | ||||||
|  | from .model import AddonModel, Data | ||||||
|  | from .utils import remove_data | ||||||
|  | from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | RE_WEBUI = re.compile( | ||||||
|  |     r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])" | ||||||
|  |     r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | RE_OLD_AUDIO = re.compile(r"\d+,\d+") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Addon(AddonModel): | ||||||
|  |     """Hold data for add-on inside Supervisor.""" | ||||||
|  |  | ||||||
    def __init__(self, coresys: CoreSys, slug: str):
        """Initialize data holder.

        The slug identifies this add-on inside the persisted add-on data.
        """
        self.coresys: CoreSys = coresys
        # Docker wrapper that manages this add-on's container.
        self.instance: DockerAddon = DockerAddon(coresys, self)
        self.slug: str = slug
|  |  | ||||||
    async def load(self) -> None:
        """Async initialize of object.

        Attaches to an existing Docker container/image for the installed
        version; a missing Docker object is not an error at load time.
        """
        with suppress(DockerAPIError):
            await self.instance.attach(tag=self.version)
|  |  | ||||||
|  |     @property | ||||||
|  |     def ip_address(self) -> IPv4Address: | ||||||
|  |         """Return IP of Add-on instance.""" | ||||||
|  |         return self.instance.ip_address | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def data(self) -> Data: | ||||||
|  |         """Return add-on data/config.""" | ||||||
|  |         return self.sys_addons.data.system[self.slug] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def data_store(self) -> Data: | ||||||
|  |         """Return add-on data from store.""" | ||||||
|  |         return self.sys_store.data.addons.get(self.slug, self.data) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def persist(self) -> Data: | ||||||
|  |         """Return add-on data/config.""" | ||||||
|  |         return self.sys_addons.data.user[self.slug] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def is_installed(self) -> bool: | ||||||
|  |         """Return True if an add-on is installed.""" | ||||||
|  |         return True | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def is_detached(self) -> bool: | ||||||
|  |         """Return True if add-on is detached.""" | ||||||
|  |         return self.slug not in self.sys_store.data.addons | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def available(self) -> bool: | ||||||
|  |         """Return True if this add-on is available on this platform.""" | ||||||
|  |         return self._available(self.data_store) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def version(self) -> Optional[str]: | ||||||
|  |         """Return installed version.""" | ||||||
|  |         return self.persist[ATTR_VERSION] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def dns(self) -> List[str]: | ||||||
|  |         """Return list of DNS name for that add-on.""" | ||||||
|  |         return [f"{self.hostname}.{DNS_SUFFIX}"] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def options(self) -> Dict[str, Any]: | ||||||
|  |         """Return options with local changes.""" | ||||||
|  |         return {**self.data[ATTR_OPTIONS], **self.persist[ATTR_OPTIONS]} | ||||||
|  |  | ||||||
|  |     @options.setter | ||||||
|  |     def options(self, value: Optional[Dict[str, Any]]): | ||||||
|  |         """Store user add-on options.""" | ||||||
|  |         if value is None: | ||||||
|  |             self.persist[ATTR_OPTIONS] = {} | ||||||
|  |         else: | ||||||
|  |             self.persist[ATTR_OPTIONS] = deepcopy(value) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def boot(self) -> bool: | ||||||
|  |         """Return boot config with prio local settings.""" | ||||||
|  |         return self.persist.get(ATTR_BOOT, super().boot) | ||||||
|  |  | ||||||
|  |     @boot.setter | ||||||
|  |     def boot(self, value: bool): | ||||||
|  |         """Store user boot options.""" | ||||||
|  |         self.persist[ATTR_BOOT] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def auto_update(self) -> bool: | ||||||
|  |         """Return if auto update is enable.""" | ||||||
|  |         return self.persist.get(ATTR_AUTO_UPDATE, super().auto_update) | ||||||
|  |  | ||||||
|  |     @auto_update.setter | ||||||
|  |     def auto_update(self, value: bool): | ||||||
|  |         """Set auto update.""" | ||||||
|  |         self.persist[ATTR_AUTO_UPDATE] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def uuid(self) -> str: | ||||||
|  |         """Return an API token for this add-on.""" | ||||||
|  |         return self.persist[ATTR_UUID] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def supervisor_token(self) -> Optional[str]: | ||||||
|  |         """Return access token for Supervisor API.""" | ||||||
|  |         return self.persist.get(ATTR_ACCESS_TOKEN) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def ingress_token(self) -> Optional[str]: | ||||||
|  |         """Return access token for Supervisor API.""" | ||||||
|  |         return self.persist.get(ATTR_INGRESS_TOKEN) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def ingress_entry(self) -> Optional[str]: | ||||||
|  |         """Return ingress external URL.""" | ||||||
|  |         if self.with_ingress: | ||||||
|  |             return f"/api/hassio_ingress/{self.ingress_token}" | ||||||
|  |         return None | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def latest_version(self) -> str: | ||||||
|  |         """Return version of add-on.""" | ||||||
|  |         return self.data_store[ATTR_VERSION] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def protected(self) -> bool: | ||||||
|  |         """Return if add-on is in protected mode.""" | ||||||
|  |         return self.persist[ATTR_PROTECTED] | ||||||
|  |  | ||||||
|  |     @protected.setter | ||||||
|  |     def protected(self, value: bool): | ||||||
|  |         """Set add-on in protected mode.""" | ||||||
|  |         self.persist[ATTR_PROTECTED] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def ports(self) -> Optional[Dict[str, Optional[int]]]: | ||||||
|  |         """Return ports of add-on.""" | ||||||
|  |         return self.persist.get(ATTR_NETWORK, super().ports) | ||||||
|  |  | ||||||
|  |     @ports.setter | ||||||
|  |     def ports(self, value: Optional[Dict[str, Optional[int]]]): | ||||||
|  |         """Set custom ports of add-on.""" | ||||||
|  |         if value is None: | ||||||
|  |             self.persist.pop(ATTR_NETWORK, None) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Secure map ports to value | ||||||
|  |         new_ports = {} | ||||||
|  |         for container_port, host_port in value.items(): | ||||||
|  |             if container_port in self.data.get(ATTR_PORTS, {}): | ||||||
|  |                 new_ports[container_port] = host_port | ||||||
|  |  | ||||||
|  |         self.persist[ATTR_NETWORK] = new_ports | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def ingress_url(self) -> Optional[str]: | ||||||
|  |         """Return URL to ingress url.""" | ||||||
|  |         if not self.with_ingress: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|  |         url = f"/api/hassio_ingress/{self.ingress_token}/" | ||||||
|  |         if ATTR_INGRESS_ENTRY in self.data: | ||||||
|  |             return f"{url}{self.data[ATTR_INGRESS_ENTRY]}" | ||||||
|  |         return url | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def webui(self) -> Optional[str]: | ||||||
|  |         """Return URL to webui or None.""" | ||||||
|  |         url = super().webui | ||||||
|  |         if not url: | ||||||
|  |             return None | ||||||
|  |         webui = RE_WEBUI.match(url) | ||||||
|  |  | ||||||
|  |         # extract arguments | ||||||
|  |         t_port = webui.group("t_port") | ||||||
|  |         t_proto = webui.group("t_proto") | ||||||
|  |         s_prefix = webui.group("s_prefix") or "" | ||||||
|  |         s_suffix = webui.group("s_suffix") or "" | ||||||
|  |  | ||||||
|  |         # search host port for this docker port | ||||||
|  |         if self.ports is None: | ||||||
|  |             port = t_port | ||||||
|  |         else: | ||||||
|  |             port = self.ports.get(f"{t_port}/tcp", t_port) | ||||||
|  |  | ||||||
|  |         # for interface config or port lists | ||||||
|  |         if isinstance(port, (tuple, list)): | ||||||
|  |             port = port[-1] | ||||||
|  |  | ||||||
|  |         # lookup the correct protocol from config | ||||||
|  |         if t_proto: | ||||||
|  |             proto = "https" if self.options.get(t_proto) else "http" | ||||||
|  |         else: | ||||||
|  |             proto = s_prefix | ||||||
|  |  | ||||||
|  |         return f"{proto}://[HOST]:{port}{s_suffix}" | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def ingress_port(self) -> Optional[int]: | ||||||
|  |         """Return Ingress port.""" | ||||||
|  |         if not self.with_ingress: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|  |         port = self.data[ATTR_INGRESS_PORT] | ||||||
|  |         if port == 0: | ||||||
|  |             return self.sys_ingress.get_dynamic_port(self.slug) | ||||||
|  |         return port | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def ingress_panel(self) -> Optional[bool]: | ||||||
|  |         """Return True if the add-on access support ingress.""" | ||||||
|  |         return self.persist[ATTR_INGRESS_PANEL] | ||||||
|  |  | ||||||
|  |     @ingress_panel.setter | ||||||
|  |     def ingress_panel(self, value: bool): | ||||||
|  |         """Return True if the add-on access support ingress.""" | ||||||
|  |         self.persist[ATTR_INGRESS_PANEL] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def audio_output(self) -> Optional[str]: | ||||||
|  |         """Return a pulse profile for output or None.""" | ||||||
|  |         if not self.with_audio: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|  |         # Fallback with old audio settings | ||||||
|  |         # Remove after 210 | ||||||
|  |         output_data = self.persist.get(ATTR_AUDIO_OUTPUT) | ||||||
|  |         if output_data and RE_OLD_AUDIO.fullmatch(output_data): | ||||||
|  |             return None | ||||||
|  |         return output_data | ||||||
|  |  | ||||||
|  |     @audio_output.setter | ||||||
|  |     def audio_output(self, value: Optional[str]): | ||||||
|  |         """Set audio output profile settings.""" | ||||||
|  |         self.persist[ATTR_AUDIO_OUTPUT] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def audio_input(self) -> Optional[str]: | ||||||
|  |         """Return pulse profile for input or None.""" | ||||||
|  |         if not self.with_audio: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|  |         # Fallback with old audio settings | ||||||
|  |         # Remove after 210 | ||||||
|  |         input_data = self.persist.get(ATTR_AUDIO_INPUT) | ||||||
|  |         if input_data and RE_OLD_AUDIO.fullmatch(input_data): | ||||||
|  |             return None | ||||||
|  |         return input_data | ||||||
|  |  | ||||||
|  |     @audio_input.setter | ||||||
|  |     def audio_input(self, value: Optional[str]): | ||||||
|  |         """Set audio input settings.""" | ||||||
|  |         self.persist[ATTR_AUDIO_INPUT] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def image(self): | ||||||
|  |         """Return image name of add-on.""" | ||||||
|  |         return self.persist.get(ATTR_IMAGE) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def need_build(self): | ||||||
|  |         """Return True if this  add-on need a local build.""" | ||||||
|  |         return ATTR_IMAGE not in self.data | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_data(self): | ||||||
|  |         """Return add-on data path inside Supervisor.""" | ||||||
|  |         return Path(self.sys_config.path_addons_data, self.slug) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_extern_data(self): | ||||||
|  |         """Return add-on data path external for Docker.""" | ||||||
|  |         return PurePath(self.sys_config.path_extern_addons_data, self.slug) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_options(self): | ||||||
|  |         """Return path to add-on options.""" | ||||||
|  |         return Path(self.path_data, "options.json") | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_pulse(self): | ||||||
|  |         """Return path to asound config.""" | ||||||
|  |         return Path(self.sys_config.path_tmp, f"{self.slug}_pulse") | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_extern_pulse(self): | ||||||
|  |         """Return path to asound config for Docker.""" | ||||||
|  |         return Path(self.sys_config.path_extern_tmp, f"{self.slug}_pulse") | ||||||
|  |  | ||||||
|  |     def save_persist(self): | ||||||
|  |         """Save data of add-on.""" | ||||||
|  |         self.sys_addons.data.save_data() | ||||||
|  |  | ||||||
|  |     async def write_options(self): | ||||||
|  |         """Return True if add-on options is written to data.""" | ||||||
|  |         schema = self.schema | ||||||
|  |         options = self.options | ||||||
|  |  | ||||||
|  |         # Update secrets for validation | ||||||
|  |         await self.sys_secrets.reload() | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             options = schema(options) | ||||||
|  |             write_json_file(self.path_options, options) | ||||||
|  |         except vol.Invalid as ex: | ||||||
|  |             _LOGGER.error( | ||||||
|  |                 "Add-on %s have wrong options: %s", | ||||||
|  |                 self.slug, | ||||||
|  |                 humanize_error(options, ex), | ||||||
|  |             ) | ||||||
|  |         except JsonFileError: | ||||||
|  |             _LOGGER.error("Add-on %s can't write options", self.slug) | ||||||
|  |         else: | ||||||
|  |             _LOGGER.debug("Add-on %s write options: %s", self.slug, options) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         raise AddonsError() | ||||||
|  |  | ||||||
|  |     async def remove_data(self): | ||||||
|  |         """Remove add-on data.""" | ||||||
|  |         if not self.path_data.is_dir(): | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Remove add-on data folder %s", self.path_data) | ||||||
|  |         await remove_data(self.path_data) | ||||||
|  |  | ||||||
|  |     def write_pulse(self): | ||||||
|  |         """Write asound config to file and return True on success.""" | ||||||
|  |         pulse_config = self.sys_plugins.audio.pulse_client( | ||||||
|  |             input_profile=self.audio_input, output_profile=self.audio_output | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         # Cleanup wrong maps | ||||||
|  |         if self.path_pulse.is_dir(): | ||||||
|  |             shutil.rmtree(self.path_pulse, ignore_errors=True) | ||||||
|  |  | ||||||
|  |         # Write pulse config | ||||||
|  |         try: | ||||||
|  |             with self.path_pulse.open("w") as config_file: | ||||||
|  |                 config_file.write(pulse_config) | ||||||
|  |         except OSError as err: | ||||||
|  |             _LOGGER.error( | ||||||
|  |                 "Add-on %s can't write pulse/client.config: %s", self.slug, err | ||||||
|  |             ) | ||||||
|  |         else: | ||||||
|  |             _LOGGER.debug( | ||||||
|  |                 "Add-on %s write pulse/client.config: %s", self.slug, self.path_pulse | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |     async def install_apparmor(self) -> None: | ||||||
|  |         """Install or Update AppArmor profile for Add-on.""" | ||||||
|  |         exists_local = self.sys_host.apparmor.exists(self.slug) | ||||||
|  |         exists_addon = self.path_apparmor.exists() | ||||||
|  |  | ||||||
|  |         # Nothing to do | ||||||
|  |         if not exists_local and not exists_addon: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Need removed | ||||||
|  |         if exists_local and not exists_addon: | ||||||
|  |             await self.sys_host.apparmor.remove_profile(self.slug) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Need install/update | ||||||
|  |         with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder: | ||||||
|  |             profile_file = Path(tmp_folder, "apparmor.txt") | ||||||
|  |  | ||||||
|  |             adjust_profile(self.slug, self.path_apparmor, profile_file) | ||||||
|  |             await self.sys_host.apparmor.load_profile(self.slug, profile_file) | ||||||
|  |  | ||||||
|  |     async def uninstall_apparmor(self) -> None: | ||||||
|  |         """Remove AppArmor profile for Add-on.""" | ||||||
|  |         if not self.sys_host.apparmor.exists(self.slug): | ||||||
|  |             return | ||||||
|  |         await self.sys_host.apparmor.remove_profile(self.slug) | ||||||
|  |  | ||||||
|  |     def test_update_schema(self) -> bool: | ||||||
|  |         """Check if the existing configuration is valid after update.""" | ||||||
|  |         # load next schema | ||||||
|  |         new_raw_schema = self.data_store[ATTR_SCHEMA] | ||||||
|  |         default_options = self.data_store[ATTR_OPTIONS] | ||||||
|  |  | ||||||
|  |         # if disabled | ||||||
|  |         if isinstance(new_raw_schema, bool): | ||||||
|  |             return True | ||||||
|  |  | ||||||
|  |         # merge options | ||||||
|  |         options = {**self.persist[ATTR_OPTIONS], **default_options} | ||||||
|  |  | ||||||
|  |         # create voluptuous | ||||||
|  |         new_schema = vol.Schema( | ||||||
|  |             vol.All(dict, validate_options(self.coresys, new_raw_schema)) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         # validate | ||||||
|  |         try: | ||||||
|  |             new_schema(options) | ||||||
|  |         except vol.Invalid: | ||||||
|  |             _LOGGER.warning("Add-on %s new schema is not compatible", self.slug) | ||||||
|  |             return False | ||||||
|  |         return True | ||||||
|  |  | ||||||
|  |     async def state(self) -> str: | ||||||
|  |         """Return running state of add-on.""" | ||||||
|  |         if await self.instance.is_running(): | ||||||
|  |             return STATE_STARTED | ||||||
|  |         return STATE_STOPPED | ||||||
|  |  | ||||||
|  |     async def start(self) -> None: | ||||||
|  |         """Set options and start add-on.""" | ||||||
|  |         if await self.instance.is_running(): | ||||||
|  |             _LOGGER.warning("%s already running!", self.slug) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Access Token | ||||||
|  |         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56) | ||||||
|  |         self.save_persist() | ||||||
|  |  | ||||||
|  |         # Options | ||||||
|  |         await self.write_options() | ||||||
|  |  | ||||||
|  |         # Sound | ||||||
|  |         if self.with_audio: | ||||||
|  |             self.write_pulse() | ||||||
|  |  | ||||||
|  |         # Start Add-on | ||||||
|  |         try: | ||||||
|  |             await self.instance.run() | ||||||
|  |         except DockerAPIError: | ||||||
|  |             raise AddonsError() from None | ||||||
|  |  | ||||||
|  |     async def stop(self) -> None: | ||||||
|  |         """Stop add-on.""" | ||||||
|  |         try: | ||||||
|  |             return await self.instance.stop() | ||||||
|  |         except DockerAPIError: | ||||||
|  |             raise AddonsError() from None | ||||||
|  |  | ||||||
|  |     async def restart(self) -> None: | ||||||
|  |         """Restart add-on.""" | ||||||
|  |         with suppress(AddonsError): | ||||||
|  |             await self.stop() | ||||||
|  |         await self.start() | ||||||
|  |  | ||||||
|  |     def logs(self) -> Awaitable[bytes]: | ||||||
|  |         """Return add-ons log output. | ||||||
|  |  | ||||||
|  |         Return a coroutine. | ||||||
|  |         """ | ||||||
|  |         return self.instance.logs() | ||||||
|  |  | ||||||
|  |     async def stats(self) -> DockerStats: | ||||||
|  |         """Return stats of container.""" | ||||||
|  |         try: | ||||||
|  |             return await self.instance.stats() | ||||||
|  |         except DockerAPIError: | ||||||
|  |             raise AddonsError() from None | ||||||
|  |  | ||||||
|  |     async def write_stdin(self, data): | ||||||
|  |         """Write data to add-on stdin. | ||||||
|  |  | ||||||
|  |         Return a coroutine. | ||||||
|  |         """ | ||||||
|  |         if not self.with_stdin: | ||||||
|  |             _LOGGER.error("Add-on don't support write to stdin!") | ||||||
|  |             raise AddonsNotSupportedError() | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             return await self.instance.write_stdin(data) | ||||||
|  |         except DockerAPIError: | ||||||
|  |             raise AddonsError() from None | ||||||
|  |  | ||||||
    async def snapshot(self, tar_file: tarfile.TarFile) -> None:
        """Snapshot state of an add-on.

        Writes metadata (user/system config, version, run state), the data
        folder, an optional locally-built image export and the AppArmor
        profile into *tar_file*. Raises AddonsError on any failure.
        """
        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
            # store local image (only locally-built add-ons have no pullable image)
            if self.need_build:
                try:
                    await self.instance.export_image(Path(temp, "image.tar"))
                except DockerAPIError:
                    raise AddonsError() from None

            # Metadata that restore() later validates against SCHEMA_ADDON_SNAPSHOT
            data = {
                ATTR_USER: self.persist,
                ATTR_SYSTEM: self.data,
                ATTR_VERSION: self.version,
                ATTR_STATE: await self.state(),
            }

            # Store local configs/state
            try:
                write_json_file(Path(temp, "addon.json"), data)
            except JsonFileError:
                _LOGGER.error("Can't save meta for %s", self.slug)
                raise AddonsError() from None

            # Store AppArmor Profile
            if self.sys_host.apparmor.exists(self.slug):
                profile = Path(temp, "apparmor.txt")
                try:
                    self.sys_host.apparmor.backup_profile(self.slug, profile)
                except HostAppArmorError:
                    _LOGGER.error("Can't backup AppArmor profile")
                    raise AddonsError() from None

            # write into tarfile (blocking work runs in the executor below)
            def _write_tarfile():
                """Write tar inside loop."""
                with tar_file as snapshot:
                    # Snapshot system
                    snapshot.add(temp, arcname=".")

                    # Snapshot data, honoring the add-on's exclude patterns
                    snapshot.add(
                        self.path_data,
                        arcname="data",
                        filter=exclude_filter(self.snapshot_exclude),
                    )

            try:
                _LOGGER.info("Build snapshot for add-on %s", self.slug)
                await self.sys_run_in_executor(_write_tarfile)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                raise AddonsError() from None

        _LOGGER.info("Finish snapshot for addon %s", self.slug)
|  |  | ||||||
    async def restore(self, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on.

        Extracts *tar_file*, validates the embedded metadata, restores
        config, image, data folder and AppArmor profile, and starts the
        add-on again if it was running when the snapshot was taken.
        Raises AddonsError / AddonsNotSupportedError on failure.
        """
        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
            # extract snapshot (secure_path guards against path traversal entries)
            def _extract_tarfile():
                """Extract tar snapshot."""
                with tar_file as snapshot:
                    snapshot.extractall(path=Path(temp), members=secure_path(snapshot))

            try:
                await self.sys_run_in_executor(_extract_tarfile)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                raise AddonsError() from None

            # Read snapshot data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except JsonFileError:
                raise AddonsError() from None

            # Validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
            except vol.Invalid as err:
                _LOGGER.error(
                    "Can't validate %s, snapshot data: %s",
                    self.slug,
                    humanize_error(data, err),
                )
                raise AddonsError() from None

            # If available on this machine/architecture
            if not self._available(data[ATTR_SYSTEM]):
                _LOGGER.error("Add-on %s is not available for this Platform", self.slug)
                raise AddonsNotSupportedError()

            # Restore local add-on informations
            _LOGGER.info("Restore config for addon %s", self.slug)
            restore_image = self._image(data[ATTR_SYSTEM])
            self.sys_addons.data.restore(
                self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
            )

            # Check version / restore image
            version = data[ATTR_VERSION]
            if not await self.instance.exists():
                _LOGGER.info("Restore/Install image for addon %s", self.slug)

                image_file = Path(temp, "image.tar")
                if image_file.is_file():
                    # Prefer the image bundled inside the snapshot
                    with suppress(DockerAPIError):
                        await self.instance.import_image(image_file)
                else:
                    # Otherwise install the image fresh and clean up leftovers
                    with suppress(DockerAPIError):
                        await self.instance.install(version, restore_image)
                        await self.instance.cleanup()
            elif self.instance.version != version or self.legacy:
                _LOGGER.info("Restore/Update image for addon %s", self.slug)
                with suppress(DockerAPIError):
                    await self.instance.update(version, restore_image)
            else:
                # Image already matches; just make sure the container is stopped
                with suppress(DockerAPIError):
                    await self.instance.stop()

            # Restore data (replace the current data folder with the snapshot copy)
            def _restore_data():
                """Restore data."""
                shutil.copytree(Path(temp, "data"), self.path_data)

            _LOGGER.info("Restore data for addon %s", self.slug)
            if self.path_data.is_dir():
                await remove_data(self.path_data)
            try:
                await self.sys_run_in_executor(_restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data: %s", err)
                raise AddonsError() from None

            # Restore AppArmor
            profile_file = Path(temp, "apparmor.txt")
            if profile_file.exists():
                try:
                    await self.sys_host.apparmor.load_profile(self.slug, profile_file)
                except HostAppArmorError:
                    _LOGGER.error("Can't restore AppArmor profile")
                    raise AddonsError() from None

            # Run add-on again if it was running at snapshot time
            if data[ATTR_STATE] == STATE_STARTED:
                return await self.start()

        _LOGGER.info("Finish restore for add-on %s", self.slug)
							
								
								
									
										79
									
								
								supervisor/addons/build.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										79
									
								
								supervisor/addons/build.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,79 @@ | |||||||
|  | """Supervisor add-on build environment.""" | ||||||
|  | from __future__ import annotations | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import TYPE_CHECKING, Dict | ||||||
|  |  | ||||||
|  | from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON | ||||||
|  | from ..coresys import CoreSys, CoreSysAttributes | ||||||
|  | from ..utils.json import JsonConfig | ||||||
|  | from .validate import SCHEMA_BUILD_CONFIG | ||||||
|  |  | ||||||
|  | if TYPE_CHECKING: | ||||||
|  |     from . import AnyAddon | ||||||
|  |  | ||||||
|  |  | ||||||
class AddonBuild(JsonConfig, CoreSysAttributes):
    """Handle build options for add-ons.

    Reads the add-on's build.json (validated by SCHEMA_BUILD_CONFIG) and
    translates it into Docker build arguments.
    """

    def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
        """Initialize Supervisor add-on builder."""
        self.coresys: CoreSys = coresys
        self.addon = addon

        super().__init__(
            Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
        )

    def save_data(self):
        """Ignore save function.

        build.json is read-only from the Supervisor's point of view.
        """
        raise RuntimeError("Build config is read-only")

    @property
    def base_image(self) -> str:
        """Return the base image for the default architecture."""
        return self._data[ATTR_BUILD_FROM].get(
            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
        )

    @property
    def squash(self) -> bool:
        """Return True if the Docker build should squash layers."""
        return self._data[ATTR_SQUASH]

    @property
    def additional_args(self) -> Dict[str, str]:
        """Return additional Docker build arguments from build.json."""
        return self._data[ATTR_ARGS]

    def get_docker_args(self, version):
        """Create a dict with Docker build arguments for this add-on version."""
        args = {
            "path": str(self.addon.path_location),
            "tag": f"{self.addon.image}:{version}",
            "pull": True,
            "forcerm": True,
            "squash": self.squash,
            "labels": {
                "io.hass.version": version,
                "io.hass.arch": self.sys_arch.default,
                "io.hass.type": META_ADDON,
                "io.hass.name": self._fix_label("name"),
                "io.hass.description": self._fix_label("description"),
            },
            "buildargs": {
                "BUILD_FROM": self.base_image,
                "BUILD_VERSION": version,
                "BUILD_ARCH": self.sys_arch.default,
                **self.additional_args,
            },
        }

        # URL label is optional
        if self.addon.url:
            args["labels"]["io.hass.url"] = self.addon.url

        return args

    def _fix_label(self, label_name: str) -> str:
        """Return the add-on attribute with unsupported characters stripped."""
        label = getattr(self.addon, label_name, "")
        return label.replace("'", "")
							
								
								
									
										73
									
								
								supervisor/addons/data.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										73
									
								
								supervisor/addons/data.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,73 @@ | |||||||
|  | """Init file for Supervisor add-on data.""" | ||||||
|  | from copy import deepcopy | ||||||
|  | import logging | ||||||
|  | from typing import Any, Dict | ||||||
|  |  | ||||||
|  | from ..const import ( | ||||||
|  |     ATTR_IMAGE, | ||||||
|  |     ATTR_OPTIONS, | ||||||
|  |     ATTR_SYSTEM, | ||||||
|  |     ATTR_USER, | ||||||
|  |     ATTR_VERSION, | ||||||
|  |     FILE_HASSIO_ADDONS, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSys, CoreSysAttributes | ||||||
|  | from ..utils.json import JsonConfig | ||||||
|  | from ..store.addon import AddonStore | ||||||
|  | from .addon import Addon | ||||||
|  | from .validate import SCHEMA_ADDONS_FILE | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | Config = Dict[str, Any] | ||||||
|  |  | ||||||
|  |  | ||||||
class AddonsData(JsonConfig, CoreSysAttributes):
    """Hold data for installed Add-ons inside Supervisor."""

    def __init__(self, coresys: CoreSys):
        """Initialize the add-on data storage."""
        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
        self.coresys: CoreSys = coresys

    @property
    def user(self):
        """Return the per-add-on user data mapping."""
        return self._data[ATTR_USER]

    @property
    def system(self):
        """Return the per-add-on system data mapping."""
        return self._data[ATTR_SYSTEM]

    def install(self, addon: AddonStore) -> None:
        """Mark a store add-on as installed and persist it."""
        slug = addon.slug
        self.system[slug] = deepcopy(addon.data)
        self.user[slug] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: addon.version,
            ATTR_IMAGE: addon.image,
        }
        self.save_data()

    def uninstall(self, addon: Addon) -> None:
        """Drop all stored data of an add-on and persist the change."""
        for mapping in (self.system, self.user):
            mapping.pop(addon.slug, None)
        self.save_data()

    def update(self, addon: AddonStore) -> None:
        """Record the new version/image of an updated add-on and persist it."""
        slug = addon.slug
        self.system[slug] = deepcopy(addon.data)
        self.user[slug].update(
            {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
        )
        self.save_data()

    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
        """Write snapshot data back for an add-on and persist it."""
        self.user[slug] = deepcopy(user)
        self.system[slug] = deepcopy(system)

        self.user[slug][ATTR_IMAGE] = image
        self.save_data()
							
								
								
									
										566
									
								
								supervisor/addons/model.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										566
									
								
								supervisor/addons/model.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,566 @@ | |||||||
|  | """Init file for Supervisor add-ons.""" | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import Any, Awaitable, Dict, List, Optional | ||||||
|  |  | ||||||
|  | from packaging import version as pkg_version | ||||||
|  | import voluptuous as vol | ||||||
|  |  | ||||||
|  | from ..const import ( | ||||||
|  |     ATTR_ADVANCED, | ||||||
|  |     ATTR_APPARMOR, | ||||||
|  |     ATTR_ARCH, | ||||||
|  |     ATTR_AUDIO, | ||||||
|  |     ATTR_AUTH_API, | ||||||
|  |     ATTR_AUTO_UART, | ||||||
|  |     ATTR_BOOT, | ||||||
|  |     ATTR_DESCRIPTON, | ||||||
|  |     ATTR_DEVICES, | ||||||
|  |     ATTR_DEVICETREE, | ||||||
|  |     ATTR_DISCOVERY, | ||||||
|  |     ATTR_DOCKER_API, | ||||||
|  |     ATTR_ENVIRONMENT, | ||||||
|  |     ATTR_FULL_ACCESS, | ||||||
|  |     ATTR_GPIO, | ||||||
|  |     ATTR_HASSIO_API, | ||||||
|  |     ATTR_HASSIO_ROLE, | ||||||
|  |     ATTR_HOMEASSISTANT, | ||||||
|  |     ATTR_HOMEASSISTANT_API, | ||||||
|  |     ATTR_HOST_DBUS, | ||||||
|  |     ATTR_HOST_IPC, | ||||||
|  |     ATTR_HOST_NETWORK, | ||||||
|  |     ATTR_HOST_PID, | ||||||
|  |     ATTR_IMAGE, | ||||||
|  |     ATTR_INGRESS, | ||||||
|  |     ATTR_INIT, | ||||||
|  |     ATTR_KERNEL_MODULES, | ||||||
|  |     ATTR_LEGACY, | ||||||
|  |     ATTR_LOCATON, | ||||||
|  |     ATTR_MACHINE, | ||||||
|  |     ATTR_MAP, | ||||||
|  |     ATTR_NAME, | ||||||
|  |     ATTR_OPTIONS, | ||||||
|  |     ATTR_PANEL_ADMIN, | ||||||
|  |     ATTR_PANEL_ICON, | ||||||
|  |     ATTR_PANEL_TITLE, | ||||||
|  |     ATTR_PORTS, | ||||||
|  |     ATTR_PORTS_DESCRIPTION, | ||||||
|  |     ATTR_PRIVILEGED, | ||||||
|  |     ATTR_REPOSITORY, | ||||||
|  |     ATTR_SCHEMA, | ||||||
|  |     ATTR_SERVICES, | ||||||
|  |     ATTR_SLUG, | ||||||
|  |     ATTR_SNAPSHOT_EXCLUDE, | ||||||
|  |     ATTR_STAGE, | ||||||
|  |     ATTR_STARTUP, | ||||||
|  |     ATTR_STDIN, | ||||||
|  |     ATTR_TIMEOUT, | ||||||
|  |     ATTR_TMPFS, | ||||||
|  |     ATTR_UDEV, | ||||||
|  |     ATTR_URL, | ||||||
|  |     ATTR_VERSION, | ||||||
|  |     ATTR_VIDEO, | ||||||
|  |     ATTR_WEBUI, | ||||||
|  |     SECURITY_DEFAULT, | ||||||
|  |     SECURITY_DISABLE, | ||||||
|  |     SECURITY_PROFILE, | ||||||
|  |     AddonStages, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSysAttributes | ||||||
|  | from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options | ||||||
|  |  | ||||||
|  | Data = Dict[str, Any] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AddonModel(CoreSysAttributes): | ||||||
|  |     """Add-on Data layout.""" | ||||||
|  |  | ||||||
|  |     slug: str = None | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def data(self) -> Data: | ||||||
|  |         """Return Add-on config/data.""" | ||||||
|  |         raise NotImplementedError() | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def is_installed(self) -> bool: | ||||||
|  |         """Return True if an add-on is installed.""" | ||||||
|  |         raise NotImplementedError() | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def is_detached(self) -> bool: | ||||||
|  |         """Return True if add-on is detached.""" | ||||||
|  |         raise NotImplementedError() | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def available(self) -> bool: | ||||||
|  |         """Return True if this add-on is available on this platform.""" | ||||||
|  |         return self._available(self.data) | ||||||
|  |  | ||||||
    @property
    def options(self) -> Dict[str, Any]:
        """Return options with local changes."""
        return self.data[ATTR_OPTIONS]

    @property
    def boot(self) -> bool:
        """Return boot config with prio local settings."""
        return self.data[ATTR_BOOT]

    @property
    def auto_update(self) -> Optional[bool]:
        """Return True if auto update is enabled (None on the base model)."""
        # NOTE(review): presumably overridden for installed add-ons — confirm
        return None

    @property
    def name(self) -> str:
        """Return name of add-on."""
        return self.data[ATTR_NAME]

    @property
    def hostname(self) -> str:
        """Return hostname of add-on (slug with underscores replaced by dashes)."""
        # Underscores are not valid in DNS hostnames
        return self.slug.replace("_", "-")

    @property
    def dns(self) -> List[str]:
        """Return list of DNS names for this add-on."""
        return []

    @property
    def timeout(self) -> int:
        """Return timeout of add-on for docker stop."""
        return self.data[ATTR_TIMEOUT]

    @property
    def uuid(self) -> Optional[str]:
        """Return an API token for this add-on (None on the base model)."""
        return None

    @property
    def supervisor_token(self) -> Optional[str]:
        """Return access token for Supervisor API (None on the base model)."""
        return None

    @property
    def ingress_token(self) -> Optional[str]:
        """Return ingress access token (None on the base model)."""
        return None

    @property
    def ingress_entry(self) -> Optional[str]:
        """Return ingress external URL (None on the base model)."""
        return None

    @property
    def description(self) -> str:
        """Return description of add-on."""
        return self.data[ATTR_DESCRIPTON]
|  |  | ||||||
|  |     @property | ||||||
|  |     def long_description(self) -> Optional[str]: | ||||||
|  |         """Return README.md as long_description.""" | ||||||
|  |         readme = Path(self.path_location, "README.md") | ||||||
|  |  | ||||||
|  |         # If readme not exists | ||||||
|  |         if not readme.exists(): | ||||||
|  |             return None | ||||||
|  |  | ||||||
|  |         # Return data | ||||||
|  |         with readme.open("r") as readme_file: | ||||||
|  |             return readme_file.read() | ||||||
|  |  | ||||||
    @property
    def repository(self) -> str:
        """Return repository of add-on."""
        return self.data[ATTR_REPOSITORY]

    @property
    def latest_version(self) -> str:
        """Return latest version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def version(self) -> str:
        """Return version of add-on.

        On the base model this is the same value as latest_version.
        """
        return self.data[ATTR_VERSION]

    @property
    def protected(self) -> bool:
        """Return if add-on is in protected mode (always True on the base model)."""
        return True

    @property
    def startup(self) -> Optional[str]:
        """Return startup type of add-on, or None when not configured."""
        return self.data.get(ATTR_STARTUP)

    @property
    def advanced(self) -> bool:
        """Return advanced mode of add-on."""
        return self.data[ATTR_ADVANCED]

    @property
    def stage(self) -> AddonStages:
        """Return stage mode of add-on."""
        return self.data[ATTR_STAGE]
|  |  | ||||||
|  |     @property | ||||||
|  |     def services_role(self) -> Dict[str, str]: | ||||||
|  |         """Return dict of services with rights.""" | ||||||
|  |         services_list = self.data.get(ATTR_SERVICES, []) | ||||||
|  |  | ||||||
|  |         services = {} | ||||||
|  |         for data in services_list: | ||||||
|  |             service = RE_SERVICE.match(data) | ||||||
|  |             services[service.group("service")] = service.group("rights") | ||||||
|  |  | ||||||
|  |         return services | ||||||
|  |  | ||||||
    @property
    def discovery(self) -> List[str]:
        """Return list of discoverable components/platforms."""
        return self.data.get(ATTR_DISCOVERY, [])

    @property
    def ports_description(self) -> Optional[Dict[str, str]]:
        """Return descriptions of ports, or None when not configured."""
        return self.data.get(ATTR_PORTS_DESCRIPTION)

    @property
    def ports(self) -> Optional[Dict[str, Optional[int]]]:
        """Return ports of add-on, or None when not configured."""
        return self.data.get(ATTR_PORTS)

    @property
    def ingress_url(self) -> Optional[str]:
        """Return URL to ingress (None on the base model)."""
        return None

    @property
    def webui(self) -> Optional[str]:
        """Return URL to webui or None."""
        return self.data.get(ATTR_WEBUI)

    @property
    def ingress_port(self) -> Optional[int]:
        """Return ingress port (None on the base model)."""
        return None

    @property
    def panel_icon(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data[ATTR_PANEL_ICON]

    @property
    def panel_title(self) -> str:
        """Return panel title for Ingress frame, falling back to the add-on name."""
        return self.data.get(ATTR_PANEL_TITLE, self.name)
|  |  | ||||||
    @property
    def panel_admin(self) -> str:
        """Return panel admin flag for Ingress frame."""
        return self.data[ATTR_PANEL_ADMIN]

    @property
    def host_network(self) -> bool:
        """Return True if add-on runs on host network."""
        return self.data[ATTR_HOST_NETWORK]

    @property
    def host_pid(self) -> bool:
        """Return True if add-on runs in the host PID namespace."""
        return self.data[ATTR_HOST_PID]

    @property
    def host_ipc(self) -> bool:
        """Return True if add-on runs in the host IPC namespace."""
        return self.data[ATTR_HOST_IPC]

    @property
    def host_dbus(self) -> bool:
        """Return True if add-on has access to host D-BUS."""
        return self.data[ATTR_HOST_DBUS]

    @property
    def devices(self) -> List[str]:
        """Return devices of add-on (empty list when none are configured)."""
        return self.data.get(ATTR_DEVICES, [])

    @property
    def auto_uart(self) -> bool:
        """Return True if we should map all UART devices."""
        return self.data[ATTR_AUTO_UART]

    @property
    def tmpfs(self) -> Optional[str]:
        """Return tmpfs configuration of add-on, or None."""
        return self.data.get(ATTR_TMPFS)

    @property
    def environment(self) -> Optional[Dict[str, str]]:
        """Return environment variables of add-on, or None."""
        return self.data.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self) -> List[str]:
        """Return list of privileged capabilities."""
        return self.data.get(ATTR_PRIVILEGED, [])

    @property
    def apparmor(self) -> str:
        """Return AppArmor security mode: disable, profile or default."""
        # AppArmor explicitly turned off in the add-on config
        if not self.data.get(ATTR_APPARMOR):
            return SECURITY_DISABLE
        # A custom profile registered on the host takes precedence
        elif self.sys_host.apparmor.exists(self.slug):
            return SECURITY_PROFILE
        return SECURITY_DEFAULT
|  |  | ||||||
    @property
    def legacy(self) -> bool:
        """Return True if the add-on doesn't support Home Assistant labels."""
        return self.data[ATTR_LEGACY]

    @property
    def access_docker_api(self) -> bool:
        """Return True if the add-on needs read-only Docker API access."""
        return self.data[ATTR_DOCKER_API]

    @property
    def access_hassio_api(self) -> bool:
        """Return True if the add-on has access to the Supervisor RESTful API."""
        return self.data[ATTR_HASSIO_API]

    @property
    def access_homeassistant_api(self) -> bool:
        """Return True if the add-on has access to the Home Assistant API proxy."""
        return self.data[ATTR_HOMEASSISTANT_API]

    @property
    def hassio_role(self) -> str:
        """Return Supervisor role for API access."""
        return self.data[ATTR_HASSIO_ROLE]

    @property
    def snapshot_exclude(self) -> List[str]:
        """Return the exclude list for snapshots."""
        return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])

    @property
    def default_init(self) -> bool:
        """Return True if the add-on has no init of its own."""
        return self.data[ATTR_INIT]

    @property
    def with_stdin(self) -> bool:
        """Return True if the add-on uses stdin input."""
        return self.data[ATTR_STDIN]

    @property
    def with_ingress(self) -> bool:
        """Return True if the add-on supports ingress."""
        return self.data[ATTR_INGRESS]

    @property
    def ingress_panel(self) -> Optional[bool]:
        """Return True if the ingress panel is enabled (None on the base model)."""
        return None
|  |  | ||||||
    @property
    def with_gpio(self) -> bool:
        """Return True if the add-on has access to the GPIO interface."""
        return self.data[ATTR_GPIO]

    @property
    def with_udev(self) -> bool:
        """Return True if the add-on has its own udev support."""
        return self.data[ATTR_UDEV]

    @property
    def with_kernel_modules(self) -> bool:
        """Return True if the add-on has access to kernel modules."""
        return self.data[ATTR_KERNEL_MODULES]

    @property
    def with_full_access(self) -> bool:
        """Return True if the add-on wants full access to hardware."""
        return self.data[ATTR_FULL_ACCESS]

    @property
    def with_devicetree(self) -> bool:
        """Return True if the add-on has read access to devicetree."""
        return self.data[ATTR_DEVICETREE]

    @property
    def access_auth_api(self) -> bool:
        """Return True if the add-on has access to the login/auth backend."""
        return self.data[ATTR_AUTH_API]

    @property
    def with_audio(self) -> bool:
        """Return True if the add-on has access to audio."""
        return self.data[ATTR_AUDIO]

    @property
    def with_video(self) -> bool:
        """Return True if the add-on has access to video."""
        return self.data[ATTR_VIDEO]

    @property
    def homeassistant_version(self) -> Optional[str]:
        """Return minimal Home Assistant version required by the add-on, or None."""
        return self.data.get(ATTR_HOMEASSISTANT)

    @property
    def url(self) -> Optional[str]:
        """Return URL of add-on, or None."""
        return self.data.get(ATTR_URL)
|  |  | ||||||
    @property
    def with_icon(self) -> bool:
        """Return True if an icon exists."""
        return self.path_icon.exists()

    @property
    def with_logo(self) -> bool:
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def with_changelog(self) -> bool:
        """Return True if a changelog exists."""
        return self.path_changelog.exists()

    @property
    def with_documentation(self) -> bool:
        """Return True if documentation exists."""
        return self.path_documentation.exists()

    @property
    def supported_arch(self) -> List[str]:
        """Return list of supported architectures."""
        return self.data[ATTR_ARCH]

    @property
    def supported_machine(self) -> List[str]:
        """Return list of supported machines (empty means all machines)."""
        return self.data.get(ATTR_MACHINE, [])

    @property
    def image(self) -> str:
        """Generate image name from data."""
        return self._image(self.data)

    @property
    def need_build(self) -> bool:
        """Return True if this add-on needs a local build (no prebuilt image)."""
        return ATTR_IMAGE not in self.data
|  |  | ||||||
|  |     @property | ||||||
|  |     def map_volumes(self) -> Dict[str, str]: | ||||||
|  |         """Return a dict of {volume: policy} from add-on.""" | ||||||
|  |         volumes = {} | ||||||
|  |         for volume in self.data[ATTR_MAP]: | ||||||
|  |             result = RE_VOLUME.match(volume) | ||||||
|  |             volumes[result.group(1)] = result.group(2) or "ro" | ||||||
|  |  | ||||||
|  |         return volumes | ||||||
|  |  | ||||||
    @property
    def path_location(self) -> Path:
        """Return path to this add-on."""
        return Path(self.data[ATTR_LOCATON])

    @property
    def path_icon(self) -> Path:
        """Return path to add-on icon."""
        return Path(self.path_location, "icon.png")

    @property
    def path_logo(self) -> Path:
        """Return path to add-on logo."""
        return Path(self.path_location, "logo.png")

    @property
    def path_changelog(self) -> Path:
        """Return path to add-on changelog."""
        return Path(self.path_location, "CHANGELOG.md")

    @property
    def path_documentation(self) -> Path:
        """Return path to add-on documentation."""
        return Path(self.path_location, "DOCS.md")

    @property
    def path_apparmor(self) -> Path:
        """Return path to custom AppArmor profile."""
        return Path(self.path_location, "apparmor.txt")
|  |  | ||||||
    @property
    def schema(self) -> vol.Schema:
        """Create a voluptuous schema for validating add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

        # A bool schema means free-form options: accept any dict unvalidated
        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))

    @property
    def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
        """Create a UI schema for add-on options, or None for free-form schemas."""
        raw_schema = self.data[ATTR_SCHEMA]

        # No UI can be generated for a free-form (bool) schema
        if isinstance(raw_schema, bool):
            return None
        return schema_ui_options(raw_schema)
|  |  | ||||||
    def __eq__(self, other):
        """Compare add-on objects by slug."""
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable — confirm that is intended
        if not isinstance(other, AddonModel):
            return False
        return self.slug == other.slug
|  |  | ||||||
    def _available(self, config) -> bool:
        """Return True if this add-on is available on this platform.

        Checks CPU architecture, machine type and the minimal
        Home Assistant version declared in the given config.
        """
        # Architecture: system must support one of the add-on's arches
        if not self.sys_arch.is_supported(config[ATTR_ARCH]):
            return False

        # Machine / Hardware: an empty/missing machine list means any machine
        machine = config.get(ATTR_MACHINE)
        if machine and self.sys_machine not in machine:
            return False

        # Home Assistant: running version must satisfy the declared minimum.
        # Falling back to the current version makes the check always pass
        # when no minimum is declared.
        version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
        if pkg_version.parse(self.sys_homeassistant.version) < pkg_version.parse(
            version
        ):
            return False

        return True
|  |  | ||||||
|  |     def _image(self, config) -> str: | ||||||
|  |         """Generate image name from data.""" | ||||||
|  |         # Repository with Dockerhub images | ||||||
|  |         if ATTR_IMAGE in config: | ||||||
|  |             arch = self.sys_arch.match(config[ATTR_ARCH]) | ||||||
|  |             return config[ATTR_IMAGE].format(arch=arch) | ||||||
|  |  | ||||||
|  |         # local build | ||||||
|  |         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}" | ||||||
|  |  | ||||||
    def install(self) -> Awaitable[None]:
        """Install this add-on (delegates to the add-on manager)."""
        return self.sys_addons.install(self.slug)

    def uninstall(self) -> Awaitable[None]:
        """Uninstall this add-on (delegates to the add-on manager)."""
        return self.sys_addons.uninstall(self.slug)

    def update(self) -> Awaitable[None]:
        """Update this add-on (delegates to the add-on manager)."""
        return self.sys_addons.update(self.slug)

    def rebuild(self) -> Awaitable[None]:
        """Rebuild this add-on (delegates to the add-on manager)."""
        return self.sys_addons.rebuild(self.slug)
							
								
								
									
										101
									
								
								supervisor/addons/utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										101
									
								
								supervisor/addons/utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,101 @@ | |||||||
|  | """Util add-ons functions.""" | ||||||
|  | from __future__ import annotations | ||||||
|  |  | ||||||
|  | import asyncio | ||||||
|  | import logging | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import TYPE_CHECKING | ||||||
|  |  | ||||||
|  | from ..const import ( | ||||||
|  |     PRIVILEGED_DAC_READ_SEARCH, | ||||||
|  |     PRIVILEGED_NET_ADMIN, | ||||||
|  |     PRIVILEGED_SYS_ADMIN, | ||||||
|  |     PRIVILEGED_SYS_MODULE, | ||||||
|  |     PRIVILEGED_SYS_PTRACE, | ||||||
|  |     PRIVILEGED_SYS_RAWIO, | ||||||
|  |     ROLE_ADMIN, | ||||||
|  |     ROLE_MANAGER, | ||||||
|  |     SECURITY_DISABLE, | ||||||
|  |     SECURITY_PROFILE, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | if TYPE_CHECKING: | ||||||
|  |     from .model import AddonModel | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  |  | ||||||
def rating_security(addon: AddonModel) -> int:
    """Return 1-6 for security rating.

    1 = not secure
    6 = high secure
    """
    score = 5

    # AppArmor: a custom profile raises the score, disabled AppArmor lowers it
    if addon.apparmor == SECURITY_DISABLE:
        score -= 1
    elif addon.apparmor == SECURITY_PROFILE:
        score += 1

    # Home Assistant Login & Ingress: ingress is preferred over auth access
    if addon.with_ingress:
        score += 2
    elif addon.access_auth_api:
        score += 1

    # Privileged options: any dangerous capability costs one point
    dangerous = {
        PRIVILEGED_NET_ADMIN,
        PRIVILEGED_SYS_ADMIN,
        PRIVILEGED_SYS_RAWIO,
        PRIVILEGED_SYS_PTRACE,
        PRIVILEGED_SYS_MODULE,
        PRIVILEGED_DAC_READ_SEARCH,
    }
    if not dangerous.isdisjoint(addon.privileged):
        score -= 1

    # Elevated Supervisor API roles
    if addon.hassio_role == ROLE_MANAGER:
        score -= 1
    elif addon.hassio_role == ROLE_ADMIN:
        score -= 2

    # Host network access
    if addon.host_network:
        score -= 1

    # Host PID namespace access
    if addon.host_pid:
        score -= 2

    # Full hardware access
    if addon.with_full_access:
        score -= 2

    # Docker API access overrides everything else
    if addon.access_docker_api:
        score = 1

    # Clamp into the documented 1..6 range
    return min(6, max(1, score))
|  |  | ||||||
|  |  | ||||||
async def remove_data(folder: Path) -> None:
    """Remove folder content with `rm -rf` and log (never raise) on failure.

    Uses an external `rm` so files owned by other users (privileged
    add-on data) are removed as well.
    """
    try:
        proc = await asyncio.create_subprocess_exec(
            "rm",
            "-rf",
            str(folder),
            stdout=asyncio.subprocess.DEVNULL,
            # Capture stderr so the log shows rm's actual error text;
            # without PIPE communicate() returns None for stderr.
            stderr=asyncio.subprocess.PIPE,
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
    else:
        if proc.returncode == 0:
            return

    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
							
								
								
									
										574
									
								
								supervisor/addons/validate.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										574
									
								
								supervisor/addons/validate.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,574 @@ | |||||||
|  | """Validate add-ons options schema.""" | ||||||
|  | import logging | ||||||
|  | import re | ||||||
|  | import secrets | ||||||
|  | from typing import Any, Dict, List | ||||||
|  | import uuid | ||||||
|  |  | ||||||
|  | import voluptuous as vol | ||||||
|  |  | ||||||
|  | from ..const import ( | ||||||
|  |     ARCH_ALL, | ||||||
|  |     ATTR_ACCESS_TOKEN, | ||||||
|  |     ATTR_ADVANCED, | ||||||
|  |     ATTR_APPARMOR, | ||||||
|  |     ATTR_ARCH, | ||||||
|  |     ATTR_ARGS, | ||||||
|  |     ATTR_AUDIO, | ||||||
|  |     ATTR_AUDIO_INPUT, | ||||||
|  |     ATTR_AUDIO_OUTPUT, | ||||||
|  |     ATTR_AUTH_API, | ||||||
|  |     ATTR_AUTO_UART, | ||||||
|  |     ATTR_AUTO_UPDATE, | ||||||
|  |     ATTR_BOOT, | ||||||
|  |     ATTR_BUILD_FROM, | ||||||
|  |     ATTR_DESCRIPTON, | ||||||
|  |     ATTR_DEVICES, | ||||||
|  |     ATTR_DEVICETREE, | ||||||
|  |     ATTR_DISCOVERY, | ||||||
|  |     ATTR_DOCKER_API, | ||||||
|  |     ATTR_ENVIRONMENT, | ||||||
|  |     ATTR_FULL_ACCESS, | ||||||
|  |     ATTR_GPIO, | ||||||
|  |     ATTR_HASSIO_API, | ||||||
|  |     ATTR_HASSIO_ROLE, | ||||||
|  |     ATTR_HOMEASSISTANT, | ||||||
|  |     ATTR_HOMEASSISTANT_API, | ||||||
|  |     ATTR_HOST_DBUS, | ||||||
|  |     ATTR_HOST_IPC, | ||||||
|  |     ATTR_HOST_NETWORK, | ||||||
|  |     ATTR_HOST_PID, | ||||||
|  |     ATTR_IMAGE, | ||||||
|  |     ATTR_INGRESS, | ||||||
|  |     ATTR_INGRESS_ENTRY, | ||||||
|  |     ATTR_INGRESS_PANEL, | ||||||
|  |     ATTR_INGRESS_PORT, | ||||||
|  |     ATTR_INGRESS_TOKEN, | ||||||
|  |     ATTR_INIT, | ||||||
|  |     ATTR_KERNEL_MODULES, | ||||||
|  |     ATTR_LEGACY, | ||||||
|  |     ATTR_LOCATON, | ||||||
|  |     ATTR_MACHINE, | ||||||
|  |     ATTR_MAP, | ||||||
|  |     ATTR_NAME, | ||||||
|  |     ATTR_NETWORK, | ||||||
|  |     ATTR_OPTIONS, | ||||||
|  |     ATTR_PANEL_ADMIN, | ||||||
|  |     ATTR_PANEL_ICON, | ||||||
|  |     ATTR_PANEL_TITLE, | ||||||
|  |     ATTR_PORTS, | ||||||
|  |     ATTR_PORTS_DESCRIPTION, | ||||||
|  |     ATTR_PRIVILEGED, | ||||||
|  |     ATTR_PROTECTED, | ||||||
|  |     ATTR_REPOSITORY, | ||||||
|  |     ATTR_SCHEMA, | ||||||
|  |     ATTR_SERVICES, | ||||||
|  |     ATTR_SLUG, | ||||||
|  |     ATTR_SNAPSHOT_EXCLUDE, | ||||||
|  |     ATTR_SQUASH, | ||||||
|  |     ATTR_STAGE, | ||||||
|  |     ATTR_STARTUP, | ||||||
|  |     ATTR_STATE, | ||||||
|  |     ATTR_STDIN, | ||||||
|  |     ATTR_SYSTEM, | ||||||
|  |     ATTR_TIMEOUT, | ||||||
|  |     ATTR_TMPFS, | ||||||
|  |     ATTR_UDEV, | ||||||
|  |     ATTR_URL, | ||||||
|  |     ATTR_USER, | ||||||
|  |     ATTR_UUID, | ||||||
|  |     ATTR_VERSION, | ||||||
|  |     ATTR_VIDEO, | ||||||
|  |     ATTR_WEBUI, | ||||||
|  |     BOOT_AUTO, | ||||||
|  |     BOOT_MANUAL, | ||||||
|  |     PRIVILEGED_ALL, | ||||||
|  |     ROLE_ALL, | ||||||
|  |     ROLE_DEFAULT, | ||||||
|  |     STARTUP_ALL, | ||||||
|  |     STARTUP_APPLICATION, | ||||||
|  |     STARTUP_SERVICES, | ||||||
|  |     STATE_STARTED, | ||||||
|  |     STATE_STOPPED, | ||||||
|  |     AddonStages, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSys | ||||||
|  | from ..discovery.validate import valid_discovery_service | ||||||
|  | from ..validate import ( | ||||||
|  |     DOCKER_PORTS, | ||||||
|  |     DOCKER_PORTS_DESCRIPTION, | ||||||
|  |     network_port, | ||||||
|  |     token, | ||||||
|  |     uuid_match, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  |  | ||||||
# Volume map entry: "<folder>" or "<folder>:<rw|ro>"
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
# Service grant entry: "<service>:<provide|want|need>"
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

# Names of the supported option-schema element types
V_STR = "str"
V_INT = "int"
V_FLOAT = "float"
V_BOOL = "bool"
V_PASSWORD = "password"
V_EMAIL = "email"
V_URL = "url"
V_PORT = "port"
V_MATCH = "match"
V_LIST = "list"

# Parses one schema element, e.g. "int(0,10)?"; named groups capture
# the optional min/max bounds, a trailing "?" marks the value optional
RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool|email|url|port"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)

# All min/max group names of RE_SCHEMA_ELEMENT
_SCHEMA_LENGTH_PARTS = (
    "i_min",
    "i_max",
    "f_min",
    "f_max",
    "s_min",
    "s_max",
    "p_min",
    "p_max",
)

# Docker image references; "{}" chars allow placeholders like {arch}
RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
)

# Voluptuous validator for a single option-schema element string
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)


# Machine types an add-on may declare support for
MACHINE_ALL = [
    "intel-nuc",
    "odroid-c2",
    "odroid-n2",
    "odroid-xu",
    "qemuarm-64",
    "qemuarm",
    "qemux86-64",
    "qemux86",
    "raspberrypi",
    "raspberrypi2",
    "raspberrypi3-64",
    "raspberrypi3",
    "raspberrypi4-64",
    "raspberrypi4",
    "tinker",
]
|  |  | ||||||
|  |  | ||||||
def _simple_startup(value):
    """Map deprecated startup aliases to their current values.

    "before"/"after" are legacy names for services/application;
    anything else is passed through unchanged.
    """
    legacy_map = {"before": STARTUP_SERVICES, "after": STARTUP_APPLICATION}
    return legacy_map.get(value, value)
|  |  | ||||||
|  |  | ||||||
|  | # pylint: disable=no-value-for-parameter | ||||||
|  | SCHEMA_ADDON_CONFIG = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Required(ATTR_NAME): vol.Coerce(str), | ||||||
|  |         vol.Required(ATTR_VERSION): vol.Coerce(str), | ||||||
|  |         vol.Required(ATTR_SLUG): vol.Coerce(str), | ||||||
|  |         vol.Required(ATTR_DESCRIPTON): vol.Coerce(str), | ||||||
|  |         vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)], | ||||||
|  |         vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)], | ||||||
|  |         vol.Optional(ATTR_URL): vol.Url(), | ||||||
|  |         vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)), | ||||||
|  |         vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]), | ||||||
|  |         vol.Optional(ATTR_INIT, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_STAGE, default=AddonStages.STABLE): vol.Coerce(AddonStages), | ||||||
|  |         vol.Optional(ATTR_PORTS): DOCKER_PORTS, | ||||||
|  |         vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION, | ||||||
|  |         vol.Optional(ATTR_WEBUI): vol.Match( | ||||||
|  |             r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$" | ||||||
|  |         ), | ||||||
|  |         vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any( | ||||||
|  |             network_port, vol.Equal(0) | ||||||
|  |         ), | ||||||
|  |         vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str), | ||||||
|  |         vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str), | ||||||
|  |         vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str), | ||||||
|  |         vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)), | ||||||
|  |         vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")], | ||||||
|  |         vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"), | ||||||
|  |         vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)], | ||||||
|  |         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)}, | ||||||
|  |         vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)], | ||||||
|  |         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_VIDEO, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_GPIO, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL), | ||||||
|  |         vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_STDIN, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)], | ||||||
|  |         vol.Optional(ATTR_DISCOVERY): [valid_discovery_service], | ||||||
|  |         vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)], | ||||||
|  |         vol.Required(ATTR_OPTIONS): dict, | ||||||
|  |         vol.Required(ATTR_SCHEMA): vol.Any( | ||||||
|  |             vol.Schema( | ||||||
|  |                 { | ||||||
|  |                     vol.Coerce(str): vol.Any( | ||||||
|  |                         SCHEMA_ELEMENT, | ||||||
|  |                         [ | ||||||
|  |                             vol.Any( | ||||||
|  |                                 SCHEMA_ELEMENT, | ||||||
|  |                                 { | ||||||
|  |                                     vol.Coerce(str): vol.Any( | ||||||
|  |                                         SCHEMA_ELEMENT, [SCHEMA_ELEMENT] | ||||||
|  |                                     ) | ||||||
|  |                                 }, | ||||||
|  |                             ) | ||||||
|  |                         ], | ||||||
|  |                         vol.Schema( | ||||||
|  |                             {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])} | ||||||
|  |                         ), | ||||||
|  |                     ) | ||||||
|  |                 } | ||||||
|  |             ), | ||||||
|  |             False, | ||||||
|  |         ), | ||||||
|  |         vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE), | ||||||
|  |         vol.Optional(ATTR_TIMEOUT, default=10): vol.All( | ||||||
|  |             vol.Coerce(int), vol.Range(min=10, max=300) | ||||||
|  |         ), | ||||||
|  |     }, | ||||||
|  |     extra=vol.REMOVE_EXTRA, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
# pylint: disable=no-value-for-parameter
# Schema for an add-on's build configuration (base images and build arguments).
SCHEMA_BUILD_CONFIG = vol.Schema(
    {
        # Map of CPU architecture -> Docker base image used for the build.
        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
        ),
        # Whether the resulting image should be squashed.
        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
        # Extra build arguments as string key/value pairs.
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(str)}
        ),
    },
    # Silently drop unknown keys instead of failing validation.
    extra=vol.REMOVE_EXTRA,
)
|  |  | ||||||
|  |  | ||||||
# pylint: disable=no-value-for-parameter
# Schema for per-add-on user data (the "user" entries of SCHEMA_ADDONS_FILE):
# installed version, user options, and user-adjustable runtime settings.
SCHEMA_ADDON_USER = vol.Schema(
    {
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        # Stable identifier generated once per installation.
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
        # Token for ingress; default generates a fresh URL-safe value.
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
            str
        ),
        vol.Optional(ATTR_OPTIONS, default=dict): dict,
        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
    },
    # Silently drop unknown keys instead of failing validation.
    extra=vol.REMOVE_EXTRA,
)
|  |  | ||||||
|  |  | ||||||
# System view of an installed add-on: the full add-on config plus its origin.
# NOTE(review): ATTR_LOCATON looks like a historic misspelling of "location"
# kept for compatibility with already-stored data — do not rename here.
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required(ATTR_LOCATON): vol.Coerce(str),
        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
    }
)
|  |  | ||||||
|  |  | ||||||
# Top-level layout of the add-ons data file: slug -> user data and
# slug -> system data mappings.
SCHEMA_ADDONS_FILE = vol.Schema(
    {
        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
    }
)
|  |  | ||||||
|  |  | ||||||
# Metadata stored for one add-on inside a snapshot: user + system data,
# the run state at snapshot time, and the snapshotted version.
SCHEMA_ADDON_SNAPSHOT = vol.Schema(
    {
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    },
    # Silently drop unknown keys instead of failing validation.
    extra=vol.REMOVE_EXTRA,
)
|  |  | ||||||
|  |  | ||||||
def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
    """Build a voluptuous-style validator for add-on options.

    Returns a callable that validates a user-supplied options dict
    against ``raw_schema``; unknown keys are dropped with a warning.
    """

    def validate(struct):
        """Validate the given options dict and return the cleaned result."""
        validated = {}

        for option, user_value in struct.items():
            # Unknown options are ignored, not treated as errors.
            if option not in raw_schema:
                _LOGGER.warning("Unknown options %s", option)
                continue

            schema_entry = raw_schema[option]
            try:
                if isinstance(schema_entry, list):
                    # List option: validate each element against the inner type.
                    validated[option] = _nested_validate_list(
                        coresys, schema_entry[0], user_value, option
                    )
                elif isinstance(schema_entry, dict):
                    # Dict option: recurse into the nested schema.
                    validated[option] = _nested_validate_dict(
                        coresys, schema_entry, user_value, option
                    )
                else:
                    # Scalar option described by a type string.
                    validated[option] = _single_validate(
                        coresys, schema_entry, user_value, option
                    )
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {option}") from None

        _check_missing_options(raw_schema, validated, "root")
        return validated

    return validate
|  |  | ||||||
|  |  | ||||||
# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
    """Validate one scalar option value against its type string.

    Resolves ``!secret`` references via coresys, extracts optional
    min/max bounds from the type string, then dispatches on the type
    prefix. Raises vol.Invalid for missing values, unknown secrets,
    or unknown types.
    """
    # A None value means the option was declared but not provided.
    if value is None:
        raise vol.Invalid(f"Missing required option '{key}'")

    # Lookup secret
    if str(value).startswith("!secret "):
        secret: str = value.partition(" ")[2]
        value = coresys.secrets.get(secret)
        if value is None:
            raise vol.Invalid(f"Unknown secret {secret}")

    # parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(typ)

    # prepare range
    range_args = {}
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if group_value:
            range_args[group_name[2:]] = float(group_value)

    if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
        # BUGFIX: previously vol.All(str(value), vol.Range(**range_args)).
        # vol.Range compares the *string* against numeric min/max and raises
        # TypeError on Python 3 whenever bounds are set, and the literal
        # str(value) validator rejected non-string scalars instead of
        # coercing them. Use Coerce(str) + Length for string bounds.
        return vol.All(vol.Coerce(str), vol.Length(**range_args))(value)
    elif typ.startswith(V_INT):
        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
    elif typ.startswith(V_FLOAT):
        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
    elif typ.startswith(V_BOOL):
        return vol.Boolean()(value)
    elif typ.startswith(V_EMAIL):
        return vol.Email()(value)
    elif typ.startswith(V_URL):
        return vol.Url()(value)
    elif typ.startswith(V_PORT):
        return network_port(value)
    elif typ.startswith(V_MATCH):
        return vol.Match(match.group("match"))(str(value))
    elif typ.startswith(V_LIST):
        return vol.In(match.group("list").split("|"))(str(value))

    raise vol.Invalid(f"Fatal error for {key} type {typ}")
|  |  | ||||||
|  |  | ||||||
def _nested_validate_list(coresys, typ, data_list, key):
    """Validate every element of a list option against the inner type."""
    # The inner type is the same for all elements, so decide once.
    if isinstance(typ, dict):
        return [
            _nested_validate_dict(coresys, typ, element, key)
            for element in data_list
        ]
    return [_single_validate(coresys, typ, element, key) for element in data_list]
|  |  | ||||||
|  |  | ||||||
def _nested_validate_dict(coresys, typ, data_dict, key):
    """Validate a dict option against its nested schema."""
    validated = {}

    for child_key, child_value in data_dict.items():
        # Unknown child options are ignored, not treated as errors.
        if child_key not in typ:
            _LOGGER.warning("Unknown options %s", child_key)
            continue

        child_schema = typ[child_key]
        if isinstance(child_schema, list):
            validated[child_key] = _nested_validate_list(
                coresys, child_schema[0], child_value, child_key
            )
        else:
            validated[child_key] = _single_validate(
                coresys, child_schema, child_value, child_key
            )

    _check_missing_options(typ, validated, key)
    return validated
|  |  | ||||||
|  |  | ||||||
def _check_missing_options(origin, exists, root):
    """Check that every non-optional key of origin is present in exists."""
    for missing_key in set(origin) - set(exists):
        schema_value = origin[missing_key]
        # A trailing "?" on the type string marks the option as optional.
        if isinstance(schema_value, str) and schema_value.endswith("?"):
            continue
        raise vol.Invalid(f"Missing option {missing_key} in {root}")
|  |  | ||||||
|  |  | ||||||
def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Build the UI description list for every option in raw_schema."""
    ui_schema: List[Dict[str, Any]] = []

    for option, element in raw_schema.items():
        if isinstance(element, list):
            _nested_ui_list(ui_schema, element, option)
        elif isinstance(element, dict):
            _nested_ui_dict(ui_schema, element, option)
        else:
            # Scalar option described by a type string.
            _single_ui_option(ui_schema, element, option)

    return ui_schema
|  |  | ||||||
|  |  | ||||||
def _single_ui_option(
    ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
) -> None:
    """Append the UI description of one scalar schema element to ui_schema."""
    node: Dict[str, Any] = {"name": key}

    if multiple:
        node["multiple"] = True

    # Extract optional min/max bounds from the type string.
    match = RE_SCHEMA_ELEMENT.match(value)
    for group_name in _SCHEMA_LENGTH_PARTS:
        bound = match.group(group_name)
        if not bound:
            continue
        suffix = group_name[2:]
        if suffix == "min":
            node["lengthMin"] = float(bound)
        elif suffix == "max":
            node["lengthMax"] = float(bound)

    # A trailing "?" marks the option as optional.
    node["optional" if value.endswith("?") else "required"] = True

    # Map the type-string prefix to the UI data type.
    if value.startswith(V_STR):
        node["type"] = "string"
    elif value.startswith(V_PASSWORD):
        node.update(type="string", format="password")
    elif value.startswith(V_INT):
        node["type"] = "integer"
    elif value.startswith(V_FLOAT):
        node["type"] = "float"
    elif value.startswith(V_BOOL):
        node["type"] = "boolean"
    elif value.startswith(V_EMAIL):
        node.update(type="string", format="email")
    elif value.startswith(V_URL):
        node.update(type="string", format="url")
    elif value.startswith(V_PORT):
        node["type"] = "integer"
    elif value.startswith(V_MATCH):
        node["type"] = "string"
    elif value.startswith(V_LIST):
        node.update(type="select", options=match.group("list").split("|"))

    ui_schema.append(node)
|  |  | ||||||
|  |  | ||||||
def _nested_ui_list(
    ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
) -> None:
    """Describe a list-typed option for the UI via its first element."""
    if not option_list:
        # An empty list leaves us no element type to describe.
        _LOGGER.error("Invalid schema %s", key)
        return
    element = option_list[0]

    if isinstance(element, dict):
        _nested_ui_dict(ui_schema, element, key, multiple=True)
    else:
        _single_ui_option(ui_schema, element, key, multiple=True)
|  |  | ||||||
|  |  | ||||||
def _nested_ui_dict(
    ui_schema: List[Dict[str, Any]],
    option_dict: Dict[str, Any],
    key: str,
    multiple: bool = False,
) -> None:
    """Describe a dict-typed option (nested schema) for the UI."""
    inner_schema: List[Dict[str, Any]] = []

    for child_key, child_value in option_dict.items():
        if isinstance(child_value, list):
            _nested_ui_list(inner_schema, child_value, child_key)
        else:
            _single_ui_option(inner_schema, child_value, child_key)

    ui_schema.append(
        {
            "name": key,
            "type": "schema",
            "optional": True,
            "multiple": multiple,
            "schema": inner_schema,
        }
    )
							
								
								
									
										396
									
								
								supervisor/api/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										396
									
								
								supervisor/api/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,396 @@ | |||||||
|  | """Init file for Supervisor RESTful API.""" | ||||||
|  | import logging | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import Optional | ||||||
|  |  | ||||||
|  | from aiohttp import web | ||||||
|  |  | ||||||
|  | from ..coresys import CoreSys, CoreSysAttributes | ||||||
|  | from .addons import APIAddons | ||||||
|  | from .audio import APIAudio | ||||||
|  | from .auth import APIAuth | ||||||
|  | from .cli import APICli | ||||||
|  | from .discovery import APIDiscovery | ||||||
|  | from .dns import APICoreDNS | ||||||
|  | from .hardware import APIHardware | ||||||
|  | from .os import APIOS | ||||||
|  | from .homeassistant import APIHomeAssistant | ||||||
|  | from .host import APIHost | ||||||
|  | from .info import APIInfo | ||||||
|  | from .ingress import APIIngress | ||||||
|  | from .proxy import APIProxy | ||||||
|  | from .security import SecurityMiddleware | ||||||
|  | from .services import APIServices | ||||||
|  | from .snapshots import APISnapshots | ||||||
|  | from .supervisor import APISupervisor | ||||||
|  | from .multicast import APIMulticast | ||||||
|  |  | ||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)


# Maximum allowed HTTP request body size: 16 MiB.
MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
|  |  | ||||||
|  |  | ||||||
|  | class RestAPI(CoreSysAttributes): | ||||||
|  |     """Handle RESTful API for Supervisor.""" | ||||||
|  |  | ||||||
    def __init__(self, coresys: CoreSys):
        """Initialize the Supervisor REST API wrapper."""
        self.coresys: CoreSys = coresys
        self.security: SecurityMiddleware = SecurityMiddleware(coresys)
        # Every request passes through the security token-validation middleware.
        self.webapp: web.Application = web.Application(
            client_max_size=MAX_CLIENT_SIZE,
            middlewares=[self.security.token_validation],
        )

        # service stuff
        self._runner: web.AppRunner = web.AppRunner(self.webapp)
        self._site: Optional[web.TCPSite] = None
|  |  | ||||||
|  |     async def load(self) -> None: | ||||||
|  |         """Register REST API Calls.""" | ||||||
|  |         self._register_supervisor() | ||||||
|  |         self._register_host() | ||||||
|  |         self._register_os() | ||||||
|  |         self._register_cli() | ||||||
|  |         self._register_multicast() | ||||||
|  |         self._register_hardware() | ||||||
|  |         self._register_homeassistant() | ||||||
|  |         self._register_proxy() | ||||||
|  |         self._register_panel() | ||||||
|  |         self._register_addons() | ||||||
|  |         self._register_ingress() | ||||||
|  |         self._register_snapshots() | ||||||
|  |         self._register_discovery() | ||||||
|  |         self._register_services() | ||||||
|  |         self._register_info() | ||||||
|  |         self._register_auth() | ||||||
|  |         self._register_dns() | ||||||
|  |         self._register_audio() | ||||||
|  |  | ||||||
|  |     def _register_host(self) -> None: | ||||||
|  |         """Register hostcontrol functions.""" | ||||||
|  |         api_host = APIHost() | ||||||
|  |         api_host.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/host/info", api_host.info), | ||||||
|  |                 web.get("/host/logs", api_host.logs), | ||||||
|  |                 web.post("/host/reboot", api_host.reboot), | ||||||
|  |                 web.post("/host/shutdown", api_host.shutdown), | ||||||
|  |                 web.post("/host/reload", api_host.reload), | ||||||
|  |                 web.post("/host/options", api_host.options), | ||||||
|  |                 web.get("/host/services", api_host.services), | ||||||
|  |                 web.post("/host/services/{service}/stop", api_host.service_stop), | ||||||
|  |                 web.post("/host/services/{service}/start", api_host.service_start), | ||||||
|  |                 web.post("/host/services/{service}/restart", api_host.service_restart), | ||||||
|  |                 web.post("/host/services/{service}/reload", api_host.service_reload), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_os(self) -> None: | ||||||
|  |         """Register OS functions.""" | ||||||
|  |         api_os = APIOS() | ||||||
|  |         api_os.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/os/info", api_os.info), | ||||||
|  |                 web.post("/os/update", api_os.update), | ||||||
|  |                 web.post("/os/config/sync", api_os.config_sync), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_cli(self) -> None: | ||||||
|  |         """Register HA cli functions.""" | ||||||
|  |         api_cli = APICli() | ||||||
|  |         api_cli.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/cli/info", api_cli.info), | ||||||
|  |                 web.get("/cli/stats", api_cli.stats), | ||||||
|  |                 web.post("/cli/update", api_cli.update), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_multicast(self) -> None: | ||||||
|  |         """Register Multicast functions.""" | ||||||
|  |         api_multicast = APIMulticast() | ||||||
|  |         api_multicast.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/multicast/info", api_multicast.info), | ||||||
|  |                 web.get("/multicast/stats", api_multicast.stats), | ||||||
|  |                 web.get("/multicast/logs", api_multicast.logs), | ||||||
|  |                 web.post("/multicast/update", api_multicast.update), | ||||||
|  |                 web.post("/multicast/restart", api_multicast.restart), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_hardware(self) -> None: | ||||||
|  |         """Register hardware functions.""" | ||||||
|  |         api_hardware = APIHardware() | ||||||
|  |         api_hardware.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/hardware/info", api_hardware.info), | ||||||
|  |                 web.get("/hardware/audio", api_hardware.audio), | ||||||
|  |                 web.post("/hardware/trigger", api_hardware.trigger), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_info(self) -> None: | ||||||
|  |         """Register info functions.""" | ||||||
|  |         api_info = APIInfo() | ||||||
|  |         api_info.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes([web.get("/info", api_info.info)]) | ||||||
|  |  | ||||||
|  |     def _register_auth(self) -> None: | ||||||
|  |         """Register auth functions.""" | ||||||
|  |         api_auth = APIAuth() | ||||||
|  |         api_auth.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [web.post("/auth", api_auth.auth), web.post("/auth/reset", api_auth.reset)] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_supervisor(self) -> None: | ||||||
|  |         """Register Supervisor functions.""" | ||||||
|  |         api_supervisor = APISupervisor() | ||||||
|  |         api_supervisor.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/supervisor/ping", api_supervisor.ping), | ||||||
|  |                 web.get("/supervisor/info", api_supervisor.info), | ||||||
|  |                 web.get("/supervisor/stats", api_supervisor.stats), | ||||||
|  |                 web.get("/supervisor/logs", api_supervisor.logs), | ||||||
|  |                 web.post("/supervisor/update", api_supervisor.update), | ||||||
|  |                 web.post("/supervisor/reload", api_supervisor.reload), | ||||||
|  |                 web.post("/supervisor/options", api_supervisor.options), | ||||||
|  |                 web.post("/supervisor/repair", api_supervisor.repair), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_homeassistant(self) -> None: | ||||||
|  |         """Register Home Assistant functions.""" | ||||||
|  |         api_hass = APIHomeAssistant() | ||||||
|  |         api_hass.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/core/info", api_hass.info), | ||||||
|  |                 web.get("/core/logs", api_hass.logs), | ||||||
|  |                 web.get("/core/stats", api_hass.stats), | ||||||
|  |                 web.post("/core/options", api_hass.options), | ||||||
|  |                 web.post("/core/update", api_hass.update), | ||||||
|  |                 web.post("/core/restart", api_hass.restart), | ||||||
|  |                 web.post("/core/stop", api_hass.stop), | ||||||
|  |                 web.post("/core/start", api_hass.start), | ||||||
|  |                 web.post("/core/check", api_hass.check), | ||||||
|  |                 web.post("/core/rebuild", api_hass.rebuild), | ||||||
|  |                 # Remove with old Supervisor fallback | ||||||
|  |                 web.get("/homeassistant/info", api_hass.info), | ||||||
|  |                 web.get("/homeassistant/logs", api_hass.logs), | ||||||
|  |                 web.get("/homeassistant/stats", api_hass.stats), | ||||||
|  |                 web.post("/homeassistant/options", api_hass.options), | ||||||
|  |                 web.post("/homeassistant/update", api_hass.update), | ||||||
|  |                 web.post("/homeassistant/restart", api_hass.restart), | ||||||
|  |                 web.post("/homeassistant/stop", api_hass.stop), | ||||||
|  |                 web.post("/homeassistant/start", api_hass.start), | ||||||
|  |                 web.post("/homeassistant/check", api_hass.check), | ||||||
|  |                 web.post("/homeassistant/rebuild", api_hass.rebuild), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_proxy(self) -> None: | ||||||
|  |         """Register Home Assistant API Proxy.""" | ||||||
|  |         api_proxy = APIProxy() | ||||||
|  |         api_proxy.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/core/api/websocket", api_proxy.websocket), | ||||||
|  |                 web.get("/core/websocket", api_proxy.websocket), | ||||||
|  |                 web.get("/core/api/stream", api_proxy.stream), | ||||||
|  |                 web.post("/core/api/{path:.+}", api_proxy.api), | ||||||
|  |                 web.get("/core/api/{path:.+}", api_proxy.api), | ||||||
|  |                 web.get("/core/api/", api_proxy.api), | ||||||
|  |                 # Remove with old Supervisor fallback | ||||||
|  |                 web.get("/homeassistant/api/websocket", api_proxy.websocket), | ||||||
|  |                 web.get("/homeassistant/websocket", api_proxy.websocket), | ||||||
|  |                 web.get("/homeassistant/api/stream", api_proxy.stream), | ||||||
|  |                 web.post("/homeassistant/api/{path:.+}", api_proxy.api), | ||||||
|  |                 web.get("/homeassistant/api/{path:.+}", api_proxy.api), | ||||||
|  |                 web.get("/homeassistant/api/", api_proxy.api), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_addons(self) -> None: | ||||||
|  |         """Register Add-on functions.""" | ||||||
|  |         api_addons = APIAddons() | ||||||
|  |         api_addons.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/addons", api_addons.list), | ||||||
|  |                 web.post("/addons/reload", api_addons.reload), | ||||||
|  |                 web.get("/addons/{addon}/info", api_addons.info), | ||||||
|  |                 web.post("/addons/{addon}/install", api_addons.install), | ||||||
|  |                 web.post("/addons/{addon}/uninstall", api_addons.uninstall), | ||||||
|  |                 web.post("/addons/{addon}/start", api_addons.start), | ||||||
|  |                 web.post("/addons/{addon}/stop", api_addons.stop), | ||||||
|  |                 web.post("/addons/{addon}/restart", api_addons.restart), | ||||||
|  |                 web.post("/addons/{addon}/update", api_addons.update), | ||||||
|  |                 web.post("/addons/{addon}/options", api_addons.options), | ||||||
|  |                 web.post("/addons/{addon}/rebuild", api_addons.rebuild), | ||||||
|  |                 web.get("/addons/{addon}/logs", api_addons.logs), | ||||||
|  |                 web.get("/addons/{addon}/icon", api_addons.icon), | ||||||
|  |                 web.get("/addons/{addon}/logo", api_addons.logo), | ||||||
|  |                 web.get("/addons/{addon}/changelog", api_addons.changelog), | ||||||
|  |                 web.get("/addons/{addon}/documentation", api_addons.documentation), | ||||||
|  |                 web.post("/addons/{addon}/stdin", api_addons.stdin), | ||||||
|  |                 web.post("/addons/{addon}/security", api_addons.security), | ||||||
|  |                 web.get("/addons/{addon}/stats", api_addons.stats), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_ingress(self) -> None: | ||||||
|  |         """Register Ingress functions.""" | ||||||
|  |         api_ingress = APIIngress() | ||||||
|  |         api_ingress.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.post("/ingress/session", api_ingress.create_session), | ||||||
|  |                 web.get("/ingress/panels", api_ingress.panels), | ||||||
|  |                 web.view("/ingress/{token}/{path:.*}", api_ingress.handler), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_snapshots(self) -> None: | ||||||
|  |         """Register snapshots functions.""" | ||||||
|  |         api_snapshots = APISnapshots() | ||||||
|  |         api_snapshots.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/snapshots", api_snapshots.list), | ||||||
|  |                 web.post("/snapshots/reload", api_snapshots.reload), | ||||||
|  |                 web.post("/snapshots/new/full", api_snapshots.snapshot_full), | ||||||
|  |                 web.post("/snapshots/new/partial", api_snapshots.snapshot_partial), | ||||||
|  |                 web.post("/snapshots/new/upload", api_snapshots.upload), | ||||||
|  |                 web.get("/snapshots/{snapshot}/info", api_snapshots.info), | ||||||
|  |                 web.post("/snapshots/{snapshot}/remove", api_snapshots.remove), | ||||||
|  |                 web.post( | ||||||
|  |                     "/snapshots/{snapshot}/restore/full", api_snapshots.restore_full | ||||||
|  |                 ), | ||||||
|  |                 web.post( | ||||||
|  |                     "/snapshots/{snapshot}/restore/partial", | ||||||
|  |                     api_snapshots.restore_partial, | ||||||
|  |                 ), | ||||||
|  |                 web.get("/snapshots/{snapshot}/download", api_snapshots.download), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_services(self) -> None: | ||||||
|  |         """Register services functions.""" | ||||||
|  |         api_services = APIServices() | ||||||
|  |         api_services.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/services", api_services.list), | ||||||
|  |                 web.get("/services/{service}", api_services.get_service), | ||||||
|  |                 web.post("/services/{service}", api_services.set_service), | ||||||
|  |                 web.delete("/services/{service}", api_services.del_service), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_discovery(self) -> None: | ||||||
|  |         """Register discovery functions.""" | ||||||
|  |         api_discovery = APIDiscovery() | ||||||
|  |         api_discovery.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/discovery", api_discovery.list), | ||||||
|  |                 web.get("/discovery/{uuid}", api_discovery.get_discovery), | ||||||
|  |                 web.delete("/discovery/{uuid}", api_discovery.del_discovery), | ||||||
|  |                 web.post("/discovery", api_discovery.set_discovery), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_dns(self) -> None: | ||||||
|  |         """Register DNS functions.""" | ||||||
|  |         api_dns = APICoreDNS() | ||||||
|  |         api_dns.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/dns/info", api_dns.info), | ||||||
|  |                 web.get("/dns/stats", api_dns.stats), | ||||||
|  |                 web.get("/dns/logs", api_dns.logs), | ||||||
|  |                 web.post("/dns/update", api_dns.update), | ||||||
|  |                 web.post("/dns/options", api_dns.options), | ||||||
|  |                 web.post("/dns/restart", api_dns.restart), | ||||||
|  |                 web.post("/dns/reset", api_dns.reset), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_audio(self) -> None: | ||||||
|  |         """Register Audio functions.""" | ||||||
|  |         api_audio = APIAudio() | ||||||
|  |         api_audio.coresys = self.coresys | ||||||
|  |  | ||||||
|  |         self.webapp.add_routes( | ||||||
|  |             [ | ||||||
|  |                 web.get("/audio/info", api_audio.info), | ||||||
|  |                 web.get("/audio/stats", api_audio.stats), | ||||||
|  |                 web.get("/audio/logs", api_audio.logs), | ||||||
|  |                 web.post("/audio/update", api_audio.update), | ||||||
|  |                 web.post("/audio/restart", api_audio.restart), | ||||||
|  |                 web.post("/audio/reload", api_audio.reload), | ||||||
|  |                 web.post("/audio/profile", api_audio.set_profile), | ||||||
|  |                 web.post("/audio/volume/{source}/application", api_audio.set_volume), | ||||||
|  |                 web.post("/audio/volume/{source}", api_audio.set_volume), | ||||||
|  |                 web.post("/audio/mute/{source}/application", api_audio.set_mute), | ||||||
|  |                 web.post("/audio/mute/{source}", api_audio.set_mute), | ||||||
|  |                 web.post("/audio/default/{source}", api_audio.set_default), | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def _register_panel(self) -> None: | ||||||
|  |         """Register panel for Home Assistant.""" | ||||||
|  |         panel_dir = Path(__file__).parent.joinpath("panel") | ||||||
|  |         self.webapp.add_routes([web.static("/app", panel_dir)]) | ||||||
|  |  | ||||||
|  |     async def start(self) -> None: | ||||||
|  |         """Run RESTful API webserver.""" | ||||||
|  |         await self._runner.setup() | ||||||
|  |         self._site = web.TCPSite( | ||||||
|  |             self._runner, host="0.0.0.0", port=80, shutdown_timeout=5 | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             await self._site.start() | ||||||
|  |         except OSError as err: | ||||||
|  |             _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err) | ||||||
|  |         else: | ||||||
|  |             _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor) | ||||||
|  |  | ||||||
|  |     async def stop(self) -> None: | ||||||
|  |         """Stop RESTful API webserver.""" | ||||||
|  |         if not self._site: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Shutdown running API | ||||||
|  |         await self._site.stop() | ||||||
|  |         await self._runner.cleanup() | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor) | ||||||
							
								
								
									
										460
									
								
								supervisor/api/addons.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										460
									
								
								supervisor/api/addons.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,460 @@ | |||||||
|  | """Init file for Supervisor Home Assistant RESTful API.""" | ||||||
|  | import asyncio | ||||||
|  | import logging | ||||||
|  | from typing import Any, Awaitable, Dict, List | ||||||
|  |  | ||||||
|  | from aiohttp import web | ||||||
|  | import voluptuous as vol | ||||||
|  |  | ||||||
|  | from ..addons import AnyAddon | ||||||
|  | from ..addons.addon import Addon | ||||||
|  | from ..addons.utils import rating_security | ||||||
|  | from ..const import ( | ||||||
|  |     ATTR_ADDONS, | ||||||
|  |     ATTR_ADVANCED, | ||||||
|  |     ATTR_APPARMOR, | ||||||
|  |     ATTR_ARCH, | ||||||
|  |     ATTR_AUDIO, | ||||||
|  |     ATTR_AUDIO_INPUT, | ||||||
|  |     ATTR_AUDIO_OUTPUT, | ||||||
|  |     ATTR_AUTH_API, | ||||||
|  |     ATTR_AUTO_UPDATE, | ||||||
|  |     ATTR_AVAILABLE, | ||||||
|  |     ATTR_BLK_READ, | ||||||
|  |     ATTR_BLK_WRITE, | ||||||
|  |     ATTR_BOOT, | ||||||
|  |     ATTR_BUILD, | ||||||
|  |     ATTR_CHANGELOG, | ||||||
|  |     ATTR_CPU_PERCENT, | ||||||
|  |     ATTR_DESCRIPTON, | ||||||
|  |     ATTR_DETACHED, | ||||||
|  |     ATTR_DEVICES, | ||||||
|  |     ATTR_DEVICETREE, | ||||||
|  |     ATTR_DISCOVERY, | ||||||
|  |     ATTR_DNS, | ||||||
|  |     ATTR_DOCKER_API, | ||||||
|  |     ATTR_DOCUMENTATION, | ||||||
|  |     ATTR_FULL_ACCESS, | ||||||
|  |     ATTR_GPIO, | ||||||
|  |     ATTR_HASSIO_API, | ||||||
|  |     ATTR_HASSIO_ROLE, | ||||||
|  |     ATTR_HOMEASSISTANT, | ||||||
|  |     ATTR_HOMEASSISTANT_API, | ||||||
|  |     ATTR_HOST_DBUS, | ||||||
|  |     ATTR_HOST_IPC, | ||||||
|  |     ATTR_HOST_NETWORK, | ||||||
|  |     ATTR_HOST_PID, | ||||||
|  |     ATTR_HOSTNAME, | ||||||
|  |     ATTR_ICON, | ||||||
|  |     ATTR_INGRESS, | ||||||
|  |     ATTR_INGRESS_ENTRY, | ||||||
|  |     ATTR_INGRESS_PANEL, | ||||||
|  |     ATTR_INGRESS_PORT, | ||||||
|  |     ATTR_INGRESS_URL, | ||||||
|  |     ATTR_INSTALLED, | ||||||
|  |     ATTR_IP_ADDRESS, | ||||||
|  |     ATTR_KERNEL_MODULES, | ||||||
|  |     ATTR_VERSION_LATEST, | ||||||
|  |     ATTR_LOGO, | ||||||
|  |     ATTR_LONG_DESCRIPTION, | ||||||
|  |     ATTR_MACHINE, | ||||||
|  |     ATTR_MAINTAINER, | ||||||
|  |     ATTR_MEMORY_LIMIT, | ||||||
|  |     ATTR_MEMORY_PERCENT, | ||||||
|  |     ATTR_MEMORY_USAGE, | ||||||
|  |     ATTR_NAME, | ||||||
|  |     ATTR_NETWORK, | ||||||
|  |     ATTR_NETWORK_DESCRIPTION, | ||||||
|  |     ATTR_NETWORK_RX, | ||||||
|  |     ATTR_NETWORK_TX, | ||||||
|  |     ATTR_OPTIONS, | ||||||
|  |     ATTR_PRIVILEGED, | ||||||
|  |     ATTR_PROTECTED, | ||||||
|  |     ATTR_RATING, | ||||||
|  |     ATTR_REPOSITORIES, | ||||||
|  |     ATTR_REPOSITORY, | ||||||
|  |     ATTR_SCHEMA, | ||||||
|  |     ATTR_SERVICES, | ||||||
|  |     ATTR_SLUG, | ||||||
|  |     ATTR_SOURCE, | ||||||
|  |     ATTR_STAGE, | ||||||
|  |     ATTR_STATE, | ||||||
|  |     ATTR_STDIN, | ||||||
|  |     ATTR_UDEV, | ||||||
|  |     ATTR_URL, | ||||||
|  |     ATTR_VERSION, | ||||||
|  |     ATTR_VIDEO, | ||||||
|  |     ATTR_WEBUI, | ||||||
|  |     BOOT_AUTO, | ||||||
|  |     BOOT_MANUAL, | ||||||
|  |     CONTENT_TYPE_BINARY, | ||||||
|  |     CONTENT_TYPE_PNG, | ||||||
|  |     CONTENT_TYPE_TEXT, | ||||||
|  |     REQUEST_FROM, | ||||||
|  |     STATE_NONE, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSysAttributes | ||||||
|  | from ..docker.stats import DockerStats | ||||||
|  | from ..exceptions import APIError | ||||||
|  | from ..validate import DOCKER_PORTS | ||||||
|  | from .utils import api_process, api_process_raw, api_validate | ||||||
|  |  | ||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)

# Request body for update endpoints: optional target version string.
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

# pylint: disable=no-value-for-parameter
# User-tunable add-on settings; APIAddons.options extends this schema with
# the add-on's own options schema before validating a request body.
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): vol.Maybe(DOCKER_PORTS),
        vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
    }
)

# pylint: disable=no-value-for-parameter
# Security endpoint body: toggle the add-on protection flag.
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
|  |  | ||||||
|  |  | ||||||
class APIAddons(CoreSysAttributes):
    """Handle RESTful API for add-on functions."""

    def _extract_addon(
        self, request: web.Request, check_installed: bool = True
    ) -> AnyAddon:
        """Return addon, throw an exception if it doesn't exist.

        The special slug "self" resolves to the add-on object stored on the
        request under REQUEST_FROM (the caller itself).
        """
        addon_slug: str = request.match_info.get("addon")

        # Lookup itself
        if addon_slug == "self":
            addon = request.get(REQUEST_FROM)
            if not isinstance(addon, Addon):
                raise APIError("Self is not an Addon")
            return addon

        addon = self.sys_addons.get(addon_slug)
        if not addon:
            raise APIError("Addon does not exist")

        if check_installed and not addon.is_installed:
            raise APIError("Addon is not installed")

        return addon

    @api_process
    async def list(self, request: web.Request) -> Dict[str, Any]:
        """Return all add-ons or repositories."""
        data_addons = []
        for addon in self.sys_addons.all:
            data_addons.append(
                {
                    ATTR_NAME: addon.name,
                    ATTR_SLUG: addon.slug,
                    ATTR_DESCRIPTON: addon.description,
                    ATTR_ADVANCED: addon.advanced,
                    ATTR_STAGE: addon.stage,
                    ATTR_VERSION: addon.latest_version,
                    # None marks add-ons that exist only in the store
                    ATTR_INSTALLED: addon.version if addon.is_installed else None,
                    ATTR_AVAILABLE: addon.available,
                    ATTR_DETACHED: addon.is_detached,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_BUILD: addon.need_build,
                    ATTR_URL: addon.url,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                }
            )

        data_repositories = []
        for repository in self.sys_store.all:
            data_repositories.append(
                {
                    ATTR_SLUG: repository.slug,
                    ATTR_NAME: repository.name,
                    ATTR_SOURCE: repository.source,
                    ATTR_URL: repository.url,
                    ATTR_MAINTAINER: repository.maintainer,
                }
            )

        return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}

    @api_process
    async def reload(self, request: web.Request) -> None:
        """Reload all add-on data from store."""
        # shield: let the reload finish even if the HTTP request is cancelled
        await asyncio.shield(self.sys_store.reload())

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)

        # Fields initialized to None below are placeholders that only make
        # sense for installed add-ons; they are filled in further down.
        data = {
            ATTR_NAME: addon.name,
            ATTR_SLUG: addon.slug,
            ATTR_HOSTNAME: addon.hostname,
            ATTR_DNS: addon.dns,
            ATTR_DESCRIPTON: addon.description,
            ATTR_LONG_DESCRIPTION: addon.long_description,
            ATTR_ADVANCED: addon.advanced,
            ATTR_STAGE: addon.stage,
            ATTR_AUTO_UPDATE: None,
            ATTR_REPOSITORY: addon.repository,
            ATTR_VERSION: None,
            ATTR_VERSION_LATEST: addon.latest_version,
            ATTR_PROTECTED: addon.protected,
            ATTR_RATING: rating_security(addon),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_SCHEMA: addon.schema_ui,
            ATTR_ARCH: addon.supported_arch,
            ATTR_MACHINE: addon.supported_machine,
            ATTR_HOMEASSISTANT: addon.homeassistant_version,
            ATTR_URL: addon.url,
            ATTR_STATE: STATE_NONE,
            ATTR_DETACHED: addon.is_detached,
            ATTR_AVAILABLE: addon.available,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_NETWORK_DESCRIPTION: addon.ports_description,
            ATTR_HOST_NETWORK: addon.host_network,
            ATTR_HOST_PID: addon.host_pid,
            ATTR_HOST_IPC: addon.host_ipc,
            ATTR_HOST_DBUS: addon.host_dbus,
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_FULL_ACCESS: addon.with_full_access,
            ATTR_APPARMOR: addon.apparmor,
            ATTR_DEVICES: _pretty_devices(addon),
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
            ATTR_DOCUMENTATION: addon.with_documentation,
            ATTR_STDIN: addon.with_stdin,
            ATTR_WEBUI: None,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HASSIO_ROLE: addon.hassio_role,
            ATTR_AUTH_API: addon.access_auth_api,
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
            ATTR_KERNEL_MODULES: addon.with_kernel_modules,
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_UDEV: addon.with_udev,
            ATTR_DOCKER_API: addon.access_docker_api,
            ATTR_VIDEO: addon.with_video,
            ATTR_AUDIO: addon.with_audio,
            ATTR_AUDIO_INPUT: None,
            ATTR_AUDIO_OUTPUT: None,
            ATTR_SERVICES: _pretty_services(addon),
            ATTR_DISCOVERY: addon.discovery,
            ATTR_IP_ADDRESS: None,
            ATTR_INGRESS: addon.with_ingress,
            ATTR_INGRESS_ENTRY: None,
            ATTR_INGRESS_URL: None,
            ATTR_INGRESS_PORT: None,
            ATTR_INGRESS_PANEL: None,
        }

        # Overwrite the placeholders with live values for installed add-ons.
        if addon.is_installed:
            data.update(
                {
                    ATTR_STATE: await addon.state(),
                    ATTR_WEBUI: addon.webui,
                    ATTR_INGRESS_ENTRY: addon.ingress_entry,
                    ATTR_INGRESS_URL: addon.ingress_url,
                    ATTR_INGRESS_PORT: addon.ingress_port,
                    ATTR_INGRESS_PANEL: addon.ingress_panel,
                    ATTR_AUDIO_INPUT: addon.audio_input,
                    ATTR_AUDIO_OUTPUT: addon.audio_output,
                    ATTR_AUTO_UPDATE: addon.auto_update,
                    ATTR_IP_ADDRESS: str(addon.ip_address),
                    ATTR_VERSION: addon.version,
                }
            )

        return data

    @api_process
    async def options(self, request: web.Request) -> None:
        """Store user options for add-on."""
        addon: AnyAddon = self._extract_addon(request)

        # Update secrets for validation
        await self.sys_secrets.reload()

        # Extend schema with add-on specific validation
        addon_schema = SCHEMA_OPTIONS.extend(
            {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
        )

        # Validate/Process Body
        body = await api_validate(addon_schema, request, origin=[ATTR_OPTIONS])
        if ATTR_OPTIONS in body:
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
            addon.boot = body[ATTR_BOOT]
        if ATTR_AUTO_UPDATE in body:
            addon.auto_update = body[ATTR_AUTO_UPDATE]
        if ATTR_NETWORK in body:
            addon.ports = body[ATTR_NETWORK]
        if ATTR_AUDIO_INPUT in body:
            addon.audio_input = body[ATTR_AUDIO_INPUT]
        if ATTR_AUDIO_OUTPUT in body:
            addon.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_INGRESS_PANEL in body:
            addon.ingress_panel = body[ATTR_INGRESS_PANEL]
            # Propagate the panel change through the ingress component
            await self.sys_ingress.update_hass_panel(addon)

        addon.save_persist()

    @api_process
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
        addon: AnyAddon = self._extract_addon(request)
        body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

        if ATTR_PROTECTED in body:
            _LOGGER.warning("Protected flag changing for %s!", addon.slug)
            addon.protected = body[ATTR_PROTECTED]

        addon.save_persist()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        addon: AnyAddon = self._extract_addon(request)
        stats: DockerStats = await addon.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    def install(self, request: web.Request) -> Awaitable[None]:
        """Install add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        # shield: don't abort the install if the HTTP request is cancelled
        return asyncio.shield(addon.install())

    @api_process
    def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall())

    @api_process
    def start(self, request: web.Request) -> Awaitable[None]:
        """Start add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.start())

    @api_process
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.stop())

    @api_process
    def update(self, request: web.Request) -> Awaitable[None]:
        """Update add-on."""
        addon: AnyAddon = self._extract_addon(request)

        # Already on the latest version -> nothing to do
        if addon.latest_version == addon.version:
            raise APIError("No update available!")

        return asyncio.shield(addon.update())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild local build add-on."""
        addon: AnyAddon = self._extract_addon(request)
        # Rebuild only makes sense for add-ons built from a local Dockerfile
        if not addon.need_build:
            raise APIError("Only local build addons are supported")

        return asyncio.shield(addon.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return logs from add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def icon(self, request: web.Request) -> bytes:
        """Return icon from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_icon:
            raise APIError("No icon found!")

        with addon.path_icon.open("rb") as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request: web.Request) -> bytes:
        """Return logo from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
            raise APIError("No logo found!")

        with addon.path_logo.open("rb") as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def changelog(self, request: web.Request) -> str:
        """Return changelog from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_changelog:
            raise APIError("No changelog found!")

        with addon.path_changelog.open("r") as changelog:
            return changelog.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def documentation(self, request: web.Request) -> str:
        """Return documentation from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_documentation:
            raise APIError("No documentation found!")

        with addon.path_documentation.open("r") as documentation:
            return documentation.read()

    @api_process
    async def stdin(self, request: web.Request) -> None:
        """Write to stdin of add-on."""
        addon: AnyAddon = self._extract_addon(request)
        if not addon.with_stdin:
            raise APIError("STDIN not supported by add-on")

        # Raw request body is forwarded verbatim to the add-on process.
        data = await request.read()
        await asyncio.shield(addon.write_stdin(data))
|  |  | ||||||
|  |  | ||||||
def _pretty_devices(addon: "AnyAddon") -> Optional[List[str]]:
    """Return a simplified device list, or None if the add-on has no devices.

    Each entry in ``addon.devices`` is a colon-separated mapping
    ("<host>:<container>:<perms>"); only the host path is kept.

    Note: the return annotation is Optional because an empty/missing device
    list intentionally yields None (the API field stays null), not [].
    """
    dev_list = addon.devices
    if not dev_list:
        return None
    return [row.split(":")[0] for row in dev_list]
|  |  | ||||||
|  |  | ||||||
def _pretty_services(addon: "AnyAddon") -> List[str]:
    """Return a simplified "name:access" list of the add-on's service roles."""
    # Comprehension instead of a manual append loop; order follows the
    # insertion order of addon.services_role.
    return [f"{name}:{access}" for name, access in addon.services_role.items()]
							
								
								
									
										170
									
								
								supervisor/api/audio.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										170
									
								
								supervisor/api/audio.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,170 @@ | |||||||
|  | """Init file for Supervisor Audio RESTful API.""" | ||||||
|  | import asyncio | ||||||
|  | import logging | ||||||
|  | from typing import Any, Awaitable, Dict | ||||||
|  |  | ||||||
|  | from aiohttp import web | ||||||
|  | import attr | ||||||
|  | import voluptuous as vol | ||||||
|  |  | ||||||
|  | from ..const import ( | ||||||
|  |     ATTR_ACTIVE, | ||||||
|  |     ATTR_APPLICATION, | ||||||
|  |     ATTR_AUDIO, | ||||||
|  |     ATTR_BLK_READ, | ||||||
|  |     ATTR_BLK_WRITE, | ||||||
|  |     ATTR_CARD, | ||||||
|  |     ATTR_CPU_PERCENT, | ||||||
|  |     ATTR_HOST, | ||||||
|  |     ATTR_INDEX, | ||||||
|  |     ATTR_INPUT, | ||||||
|  |     ATTR_VERSION_LATEST, | ||||||
|  |     ATTR_MEMORY_LIMIT, | ||||||
|  |     ATTR_MEMORY_PERCENT, | ||||||
|  |     ATTR_MEMORY_USAGE, | ||||||
|  |     ATTR_NAME, | ||||||
|  |     ATTR_NETWORK_RX, | ||||||
|  |     ATTR_NETWORK_TX, | ||||||
|  |     ATTR_OUTPUT, | ||||||
|  |     ATTR_VERSION, | ||||||
|  |     ATTR_VOLUME, | ||||||
|  |     CONTENT_TYPE_BINARY, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSysAttributes | ||||||
|  | from ..exceptions import APIError | ||||||
|  | from ..host.sound import StreamType | ||||||
|  | from .utils import api_process, api_process_raw, api_validate | ||||||
|  |  | ||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)

# Request body for the audio plugin update endpoint (optional target version).
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

# Body for volume changes: stream index plus new volume level.
SCHEMA_VOLUME = vol.Schema(
    {
        vol.Required(ATTR_INDEX): vol.Coerce(int),
        vol.Required(ATTR_VOLUME): vol.Coerce(float),
    }
)

# pylint: disable=no-value-for-parameter
# Body for mute toggling: stream index plus desired mute state.
SCHEMA_MUTE = vol.Schema(
    {
        vol.Required(ATTR_INDEX): vol.Coerce(int),
        vol.Required(ATTR_ACTIVE): vol.Boolean(),
    }
)

# Body for selecting a default source/sink by name.
SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): vol.Coerce(str)})

# Body for activating a sound card profile.
SCHEMA_PROFILE = vol.Schema(
    {vol.Required(ATTR_CARD): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str)}
)
|  |  | ||||||
|  |  | ||||||
class APIAudio(CoreSysAttributes):
    """Handle RESTful API for Audio functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return Audio plugin information and the host sound topology."""
        sound = self.sys_host.sound
        audio_state = {
            ATTR_CARD: [attr.asdict(card) for card in sound.cards],
            ATTR_INPUT: [attr.asdict(stream) for stream in sound.inputs],
            ATTR_OUTPUT: [attr.asdict(stream) for stream in sound.outputs],
            ATTR_APPLICATION: [
                attr.asdict(stream) for stream in sound.applications
            ],
        }
        return {
            ATTR_VERSION: self.sys_plugins.audio.version,
            ATTR_VERSION_LATEST: self.sys_plugins.audio.latest_version,
            ATTR_HOST: str(self.sys_docker.network.audio),
            ATTR_AUDIO: audio_state,
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource usage of the Audio plugin container."""
        usage = await self.sys_plugins.audio.stats()

        return {
            ATTR_CPU_PERCENT: usage.cpu_percent,
            ATTR_MEMORY_USAGE: usage.memory_usage,
            ATTR_MEMORY_LIMIT: usage.memory_limit,
            ATTR_MEMORY_PERCENT: usage.memory_percent,
            ATTR_NETWORK_RX: usage.network_rx,
            ATTR_NETWORK_TX: usage.network_tx,
            ATTR_BLK_READ: usage.blk_read,
            ATTR_BLK_WRITE: usage.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update Audio plugin to the requested (or latest) version."""
        payload = await api_validate(SCHEMA_VERSION, request)
        target = payload.get(ATTR_VERSION, self.sys_plugins.audio.latest_version)

        # Refuse a no-op update so callers get a clear error instead of silence.
        if target == self.sys_plugins.audio.version:
            raise APIError("Version {} is already in use".format(target))
        # Shield: a cancelled HTTP request must not abort the update midway.
        await asyncio.shield(self.sys_plugins.audio.update(target))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Audio Docker logs."""
        return self.sys_plugins.audio.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Audio plugin."""
        return asyncio.shield(self.sys_plugins.audio.restart())

    @api_process
    def reload(self, request: web.Request) -> Awaitable[None]:
        """Refresh the cached host sound information."""
        return asyncio.shield(self.sys_host.sound.update())

    @api_process
    async def set_volume(self, request: web.Request) -> None:
        """Set audio volume on a stream."""
        # Stream kind comes from the URL; "application" routes address an
        # application stream rather than a device stream.
        stream_type: StreamType = StreamType(request.match_info.get("source"))
        is_application: bool = request.path.endswith("application")
        payload = await api_validate(SCHEMA_VOLUME, request)

        await asyncio.shield(
            self.sys_host.sound.set_volume(
                stream_type, payload[ATTR_INDEX], payload[ATTR_VOLUME], is_application
            )
        )

    @api_process
    async def set_mute(self, request: web.Request) -> None:
        """Mute or unmute a stream."""
        stream_type: StreamType = StreamType(request.match_info.get("source"))
        is_application: bool = request.path.endswith("application")
        payload = await api_validate(SCHEMA_MUTE, request)

        await asyncio.shield(
            self.sys_host.sound.set_mute(
                stream_type, payload[ATTR_INDEX], payload[ATTR_ACTIVE], is_application
            )
        )

    @api_process
    async def set_default(self, request: web.Request) -> None:
        """Set the default stream for a source type."""
        stream_type: StreamType = StreamType(request.match_info.get("source"))
        payload = await api_validate(SCHEMA_DEFAULT, request)

        await asyncio.shield(
            self.sys_host.sound.set_default(stream_type, payload[ATTR_NAME])
        )

    @api_process
    async def set_profile(self, request: web.Request) -> None:
        """Activate a profile on a sound card."""
        payload = await api_validate(SCHEMA_PROFILE, request)

        # NOTE(review): "ativate_profile" looks misspelled, but the spelling
        # must match the host sound API — confirm before renaming the call.
        await asyncio.shield(
            self.sys_host.sound.ativate_profile(payload[ATTR_CARD], payload[ATTR_NAME])
        )
							
								
								
									
										88
									
								
								supervisor/api/auth.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										88
									
								
								supervisor/api/auth.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,88 @@ | |||||||
|  | """Init file for Supervisor auth/SSO RESTful API.""" | ||||||
|  | import asyncio | ||||||
|  | import logging | ||||||
from typing import Awaitable, Dict
|  |  | ||||||
|  | from aiohttp import BasicAuth, web | ||||||
|  | from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE | ||||||
|  | from aiohttp.web_exceptions import HTTPUnauthorized | ||||||
|  | import voluptuous as vol | ||||||
|  |  | ||||||
|  | from ..addons.addon import Addon | ||||||
|  | from ..const import ( | ||||||
|  |     ATTR_PASSWORD, | ||||||
|  |     ATTR_USERNAME, | ||||||
|  |     CONTENT_TYPE_JSON, | ||||||
|  |     CONTENT_TYPE_URL, | ||||||
|  |     REQUEST_FROM, | ||||||
|  | ) | ||||||
|  | from ..coresys import CoreSysAttributes | ||||||
|  | from ..exceptions import APIForbidden | ||||||
|  | from .utils import api_process, api_validate | ||||||
|  |  | ||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)

# Password-reset payload: both username and password are mandatory.
SCHEMA_PASSWORD_RESET = vol.Schema(
    {vol.Required(ATTR_USERNAME): vol.Coerce(str), vol.Required(ATTR_PASSWORD): vol.Coerce(str)}
)
|  |  | ||||||
|  |  | ||||||
class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
        """Process login request with basic auth.

        Return a coroutine.
        """
        # Fix: annotated -> bool before, but this returns an un-awaited
        # coroutine (callers await it), so the type is Awaitable[bool].
        auth = BasicAuth.decode(request.headers[AUTHORIZATION])
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(
        self, request: web.Request, addon: Addon, data: Dict[str, str]
    ) -> Awaitable[bool]:
        """Process login with dict data.

        Return a coroutine.
        """
        # Accept both "username" and the shorter "user" key from clients.
        username = data.get("username") or data.get("user")
        password = data.get("password")

        return self.sys_auth.check_login(addon, username, password)

    @api_process
    async def auth(self, request: web.Request) -> bool:
        """Process login request.

        Raises APIForbidden when the calling add-on lacks auth API access and
        HTTPUnauthorized (with a basic-auth challenge) when no credentials
        were supplied in any supported form.
        """
        addon = request[REQUEST_FROM]

        if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth header takes precedence over any request body.
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)

        # JSON body
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json()
            return await self._process_dict(request, addon, data)

        # URL-encoded form body
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
            return await self._process_dict(request, addon, data)

        # No recognizable credentials: challenge the client for basic auth.
        raise HTTPUnauthorized(
            headers={WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'}
        )

    @api_process
    async def reset(self, request: web.Request) -> None:
        """Process reset password request."""
        body: Dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
        # Shield: a cancelled request must not abort the password change midway.
        await asyncio.shield(
            self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
        )
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user