Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-10-30 22:19:43 +00:00

# Compare commits: 2023.08.0...trigger-sy

618 commits
[Commit table omitted: 618 commits, e415923553 through 847736dab8. The scraped table preserved only commit SHA1s and default avatar images; author names, dates, and commit messages were not captured. The avatars indicate that most entries are dependabot[bot] dependency bumps.]

**.devcontainer/devcontainer.json**
```diff
@@ -4,37 +4,45 @@
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
   "remoteEnv": {
     "PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
   },
   "appPort": ["9123:8123", "7357:4357"],
-  "postCreateCommand": "bash devcontainer_bootstrap",
+  "postCreateCommand": "bash devcontainer_setup",
+  "postStartCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
-  "extensions": [
-    "ms-python.python",
-    "ms-python.vscode-pylance",
-    "visualstudioexptteam.vscodeintellicode",
-    "esbenp.prettier-vscode"
-  ],
-  "mounts": ["type=volume,target=/var/lib/docker"],
-  "settings": {
-    "terminal.integrated.profiles.linux": {
-      "zsh": {
-        "path": "/usr/bin/zsh"
-      }
-    },
-    "terminal.integrated.defaultProfile.linux": "zsh",
-    "editor.formatOnPaste": false,
-    "editor.formatOnSave": true,
-    "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true,
-    "python.pythonPath": "/usr/local/bin/python3",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": ["--target-version", "py310"],
-    "python.formatting.blackPath": "/usr/local/bin/black",
-    "python.linting.banditPath": "/usr/local/bin/bandit",
-    "python.linting.flake8Path": "/usr/local/bin/flake8",
-    "python.linting.mypyPath": "/usr/local/bin/mypy",
-    "python.linting.pylintPath": "/usr/local/bin/pylint",
-    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
-  }
+  "customizations": {
+    "vscode": {
+      "extensions": [
+        "charliermarsh.ruff",
+        "ms-python.pylint",
+        "ms-python.vscode-pylance",
+        "visualstudioexptteam.vscodeintellicode",
+        "redhat.vscode-yaml",
+        "esbenp.prettier-vscode",
+        "GitHub.vscode-pull-request-github"
+      ],
+      "settings": {
+        "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
+        "python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
+        "python.terminal.activateEnvInCurrentTerminal": true,
+        "python.testing.pytestArgs": ["--no-cov"],
+        "pylint.importStrategy": "fromEnvironment",
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true,
+        "files.trimTrailingWhitespace": true,
+        "terminal.integrated.profiles.linux": {
+          "zsh": {
+            "path": "/usr/bin/zsh"
+          }
+        },
+        "terminal.integrated.defaultProfile.linux": "zsh",
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
+      }
+    }
+  },
+  "mounts": ["type=volume,target=/var/lib/docker"]
 }
```
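The devcontainer change above does two things: `postCreateCommand` now runs a one-time `devcontainer_setup`, while `postStartCommand` re-runs `devcontainer_bootstrap` on every container start, and the editor extensions and settings move under the `customizations.vscode` key used by the current Dev Containers spec. A minimal sketch of exercising those hooks outside VS Code, assuming the `@devcontainers/cli` package is installed (the interpreter path is taken from the settings above):

```bash
# Build the container and run postCreateCommand (bash devcontainer_setup);
# postStartCommand (bash devcontainer_bootstrap) runs on each subsequent start.
devcontainer up --workspace-folder .

# Check that the venv interpreter the settings point at is present in the container.
devcontainer exec --workspace-folder . /home/vscode/.local/ha-venv/bin/python --version
```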
**.github/PULL_REQUEST_TEMPLATE.md** (9 changes, vendored)
```diff
@@ -38,6 +38,7 @@
 - This PR is related to issue:
 - Link to documentation pull request:
 - Link to cli pull request:
+- Link to client library pull request:
 
 ## Checklist
 
@@ -52,12 +53,14 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.
 
-If API endpoints of add-on configuration are added/changed:
+If API endpoints or add-on configuration are added/changed:
 
 - [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
+- [ ] [CLI][cli-repository] updated (if necessary)
+- [ ] [Client library][client-library-repository] updated (if necessary)
 
 <!--
   Thank you for contributing <3
@@ -67,3 +70,5 @@ If API endpoints of add-on configuration are added/changed:
 
 [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
 [docs-repository]: https://github.com/home-assistant/developers.home-assistant
+[cli-repository]: https://github.com/home-assistant/cli
+[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/
```
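The template now asks contributors to format with Ruff instead of Black. A short sketch of the local commands, assuming Ruff is installed in the development venv (`--check` is the non-mutating variant, mirroring what a CI check would run):

```bash
# Format in place, as the updated checklist requests.
ruff format supervisor tests

# Verify formatting without modifying files.
ruff format --check supervisor tests
```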
**.github/workflows/builder.yml** (38 changes, vendored)
```diff
@@ -33,7 +33,7 @@ on:
       - setup.py
 
 env:
-  DEFAULT_PYTHON: "3.11"
+  DEFAULT_PYTHON: "3.12"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
 
@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0
 
@@ -70,13 +70,13 @@ jobs:
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: jitterbit/get-changed-files@v1
+        uses: masesgroup/retrieve-changed-files@v3.0.0
 
       - name: Check if requirements files changed
         id: requirements
         run: |
-          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
-            echo "::set-output name=changed::true"
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
+            echo "changed=true" >> "$GITHUB_OUTPUT"
           fi
 
   build:
@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0
 
@@ -106,13 +106,13 @@
 
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2023.04.0
+        uses: home-assistant/wheels@2024.07.1
         with:
-          abi: cp311
+          abi: cp312
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev"
           skip-binary: aiohttp
           env-file: true
           requirements: "requirements.txt"
@@ -125,20 +125,20 @@ jobs:
 
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
 
       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.1.1
+        uses: sigstore/cosign-installer@v3.7.0
         with:
-          cosign-release: "v2.0.2"
+          cosign-release: "v2.4.0"
 
       - name: Install dirhash and calc hash
         if: needs.init.outputs.publish == 'true'
         run: |
-          pip3 install dirhash
+          pip3 install setuptools dirhash
           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
           echo "${dir_hash}" > rootfs/supervisor.sha256
 
@@ -149,7 +149,7 @@ jobs:
 
       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v2.2.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
 
       - name: Build supervisor
-        uses: home-assistant/builder@2023.06.1
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             $BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
 
       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
 
       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2023.06.1
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             --test \
@@ -324,7 +324,7 @@ jobs:
           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
             exit 1
           fi
-          echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
+          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
 
       - name: Uninstall SSH add-on
         run: |
```
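Besides the action version bumps, this workflow migrates off the deprecated `::set-output` workflow command to the `$GITHUB_OUTPUT` environment file. A small sketch of the two mechanisms (the multiline form is an assumption drawn from GitHub's documented delimiter syntax, not from this diff):

```bash
# Deprecated workflow command, as removed by this diff:
echo "::set-output name=changed::true"

# Current mechanism: append key=value lines to the file GitHub names in $GITHUB_OUTPUT.
echo "changed=true" >> "$GITHUB_OUTPUT"

# Multiline values use a heredoc-style delimiter (pick one unlikely to collide).
{
  echo "report<<EOF"
  echo "line one"
  echo "line two"
  echo "EOF"
} >> "$GITHUB_OUTPUT"
```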
**.github/workflows/ci.yaml** (253 changes, vendored)
```diff
@@ -8,8 +8,8 @@ on:
   pull_request: ~
 
 env:
-  DEFAULT_PYTHON: "3.11"
-  PRE_COMMIT_HOME: ~/.cache/pre-commit
+  DEFAULT_PYTHON: "3.12"
+  PRE_COMMIT_CACHE: ~/.cache/pre-commit
 
 concurrency:
   group: "${{ github.workflow }}-${{ github.ref }}"
@@ -25,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -47,9 +47,10 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          lookup-only: true
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
           restore-keys: |
@@ -60,21 +61,21 @@ jobs:
           . venv/bin/activate
           pre-commit install-hooks
 
-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -84,10 +85,67 @@ jobs:
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Run black
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.1.1
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff-format
         run: |
           . venv/bin/activate
-          black --target-version py38 --check supervisor tests setup.py
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
+
+  lint-ruff:
+    name: Check ruff
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.1
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.2.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.1.1
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.1.1
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
 
   lint-dockerfile:
     name: Check Dockerfile
@@ -95,7 +153,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -110,15 +168,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -130,9 +188,9 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -148,94 +206,21 @@ jobs:
           . venv/bin/activate
           pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
 
-  lint-flake8:
-    name: Check flake8
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.1
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register flake8 problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/flake8.json"
-      - name: Run flake8
-        run: |
-          . venv/bin/activate
-          flake8 supervisor tests
-
-  lint-isort:
-    name: Check isort
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.1
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.1
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run isort
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
-
   lint-json:
     name: Check JSON
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -247,9 +232,9 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -271,15 +256,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -297,66 +282,25 @@ jobs:
           . venv/bin/activate
           pylint supervisor tests
 
-  lint-pyupgrade:
-    name: Check pyupgrade
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.1
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
```
|           exit 1 | ||||
|       - name: Restore pre-commit environment from cache | ||||
|         id: cache-precommit | ||||
|         uses: actions/cache@v3.3.1 | ||||
|         with: | ||||
|           path: ${{ env.PRE_COMMIT_HOME }} | ||||
|           key: | | ||||
|             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} | ||||
|       - name: Fail job if cache restore failed | ||||
|         if: steps.cache-precommit.outputs.cache-hit != 'true' | ||||
|         run: | | ||||
|           echo "Failed to restore pre-commit environment from cache" | ||||
|           exit 1 | ||||
|       - name: Run pyupgrade | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure | ||||
|  | ||||
|   pytest: | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: prepare | ||||
|     name: Run tests Python ${{ needs.prepare.outputs.python-version }} | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v3.5.3 | ||||
|         uses: actions/checkout@v4.2.1 | ||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||
|         uses: actions/setup-python@v4.7.0 | ||||
|         uses: actions/setup-python@v5.2.0 | ||||
|         id: python | ||||
|         with: | ||||
|           python-version: ${{ needs.prepare.outputs.python-version }} | ||||
|       - name: Install Cosign | ||||
|         uses: sigstore/cosign-installer@v3.1.1 | ||||
|         uses: sigstore/cosign-installer@v3.7.0 | ||||
|         with: | ||||
|           cosign-release: "v2.0.2" | ||||
|           cosign-release: "v2.4.0" | ||||
|       - name: Restore Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache@v3.3.1 | ||||
|         uses: actions/cache@v4.1.1 | ||||
|         with: | ||||
|           path: venv | ||||
|           key: | | ||||
| @@ -369,7 +313,7 @@ jobs: | ||||
|       - name: Install additional system dependencies | ||||
|         run: | | ||||
|           sudo apt-get update | ||||
|           sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11 | ||||
|           sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon | ||||
|       - name: Register Python problem matcher | ||||
|         run: | | ||||
|           echo "::add-matcher::.github/workflows/matchers/python.json" | ||||
| @@ -391,10 +335,11 @@ jobs: | ||||
|             -o console_output_style=count \ | ||||
|             tests | ||||
|       - name: Upload coverage artifact | ||||
|         uses: actions/upload-artifact@v3.1.2 | ||||
|         uses: actions/upload-artifact@v4.4.3 | ||||
|         with: | ||||
|           name: coverage-${{ matrix.python-version }} | ||||
|           path: .coverage | ||||
|           include-hidden-files: true | ||||
|  | ||||
|   coverage: | ||||
|     name: Process test coverage | ||||
| @@ -402,15 +347,15 @@ jobs: | ||||
|     needs: ["pytest", "prepare"] | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v3.5.3 | ||||
|         uses: actions/checkout@v4.2.1 | ||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||
|         uses: actions/setup-python@v4.7.0 | ||||
|         uses: actions/setup-python@v5.2.0 | ||||
|         id: python | ||||
|         with: | ||||
|           python-version: ${{ needs.prepare.outputs.python-version }} | ||||
|       - name: Restore Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache@v3.3.1 | ||||
|         uses: actions/cache@v4.1.1 | ||||
|         with: | ||||
|           path: venv | ||||
|           key: | | ||||
| @@ -421,7 +366,7 @@ jobs: | ||||
|           echo "Failed to restore Python virtual environment from cache" | ||||
|           exit 1 | ||||
|       - name: Download all coverage artifacts | ||||
|         uses: actions/download-artifact@v3 | ||||
|         uses: actions/download-artifact@v4.1.8 | ||||
|       - name: Combine coverage results | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
| @@ -429,4 +374,4 @@ jobs: | ||||
|           coverage report | ||||
|           coverage xml | ||||
|       - name: Upload coverage to Codecov | ||||
|         uses: codecov/codecov-action@v3.1.4 | ||||
|         uses: codecov/codecov-action@v4.6.0 | ||||
|   | ||||
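Worth noting about the workflow above: every job restores the same virtual-environment cache and deliberately fails on a miss, so dependencies are only ever installed once (in the prepare job), and the key changes whenever either requirements file does. A rough local analogy in Python, with a hypothetical file_hash helper standing in for GitHub's hashFiles() (this is an illustration of the key's behavior, not the exact algorithm):

    # Sketch only: approximates how the composite venv cache key reacts to
    # changes in the requirements files; hashFiles() is not literally this.
    from hashlib import sha256
    from pathlib import Path
    import platform

    def file_hash(path: str) -> str:
        """Hypothetical stand-in for hashFiles(): hash of file contents."""
        return sha256(Path(path).read_bytes()).hexdigest()[:16]

    python_version = "3.12"  # mirrors needs.prepare.outputs.python-version
    key = (
        f"{platform.system()}-venv-{python_version}-"
        f"{file_hash('requirements.txt')}-{file_hash('requirements_tests.txt')}"
    )
    print(key)  # any edit to either requirements file yields a new key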
							
								
								
									
.github/workflows/lock.yml (vendored, 2 lines changed)
							| @@ -9,7 +9,7 @@ jobs: | ||||
|   lock: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: dessant/lock-threads@v4.0.1 | ||||
|       - uses: dessant/lock-threads@v5.0.1 | ||||
|         with: | ||||
|           github-token: ${{ github.token }} | ||||
|           issue-inactive-days: "30" | ||||
|   | ||||
							
								
								
									
.github/workflows/matchers/flake8.json (vendored, deleted, 30 lines)
							| @@ -1,30 +0,0 @@ | ||||
| { | ||||
|   "problemMatcher": [ | ||||
|     { | ||||
|       "owner": "flake8-error", | ||||
|       "severity": "error", | ||||
|       "pattern": [ | ||||
|         { | ||||
|           "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$", | ||||
|           "file": 1, | ||||
|           "line": 2, | ||||
|           "column": 3, | ||||
|           "message": 4 | ||||
|         } | ||||
|       ] | ||||
|     }, | ||||
|     { | ||||
|       "owner": "flake8-warning", | ||||
|       "severity": "warning", | ||||
|       "pattern": [ | ||||
|         { | ||||
|           "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$", | ||||
|           "file": 1, | ||||
|           "line": 2, | ||||
|           "column": 3, | ||||
|           "message": 4 | ||||
|         } | ||||
|       ] | ||||
|     } | ||||
|   ] | ||||
| } | ||||
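This matcher is deleted because flake8 itself is gone (ruff reports through the generic Python matcher instead). For reference, the two regexes split flake8 output into errors (Exxx codes) and warnings (C/D/F/N/W codes) purely by the first letter of the code; a quick sanity check against a made-up diagnostic line:

    # Exercise the deleted matcher regexes; the sample line is invented.
    import re

    ERROR = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")
    WARNING = re.compile(r"^(.*):(\d+):(\d+):\s([CDFNW]\d{3}\s.*)$")

    line = "supervisor/core.py:10:1: E302 expected 2 blank lines, found 1"
    match = ERROR.match(line)
    assert match is not None and match.group(4).startswith("E302")
    assert WARNING.match(line) is None  # E-codes never hit the warning pattern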
							
								
								
									
.github/workflows/release-drafter.yml (vendored, 6 lines changed)
							| @@ -11,7 +11,7 @@ jobs: | ||||
|     name: Release Drafter | ||||
|     steps: | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@v3.5.3 | ||||
|         uses: actions/checkout@v4.2.1 | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|  | ||||
| @@ -33,10 +33,10 @@ jobs: | ||||
|  | ||||
|           echo Current version:    $latest | ||||
|           echo New target version: $datepre.$newpost | ||||
|           echo "::set-output name=version::$datepre.$newpost" | ||||
|           echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT" | ||||
|  | ||||
|       - name: Run Release Drafter | ||||
|         uses: release-drafter/release-drafter@v5.24.0 | ||||
|         uses: release-drafter/release-drafter@v6.0.0 | ||||
|         with: | ||||
|           tag: ${{ steps.version.outputs.version }} | ||||
|           name: ${{ steps.version.outputs.version }} | ||||
|   | ||||
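The ::set-output workflow command shown above was deprecated by GitHub in favor of appending key=value pairs to the file named by $GITHUB_OUTPUT, which is what the replacement line does. The same write from Python, with a local stand-in for the runner-provided path (the version value here is illustrative):

    # Sketch: emit a step output the post-::set-output way. On a real runner
    # GITHUB_OUTPUT is already set; the fallback path is for local runs.
    import os

    output_file = os.environ.get("GITHUB_OUTPUT", "github_output.txt")
    with open(output_file, "a", encoding="utf-8") as fh:
        fh.write("version=2024.10.1\n")  # same effect as the echo above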
							
								
								
									
.github/workflows/sentry.yaml (vendored, 4 lines changed)
							| @@ -10,9 +10,9 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v3.5.3 | ||||
|         uses: actions/checkout@v4.2.1 | ||||
|       - name: Sentry Release | ||||
|         uses: getsentry/action-release@v1.4.1 | ||||
|         uses: getsentry/action-release@v1.7.0 | ||||
|         env: | ||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||
|           SENTRY_ORG: ${{ secrets.SENTRY_ORG }} | ||||
|   | ||||
							
								
								
									
.github/workflows/stale.yml (vendored, 2 lines changed)
							| @@ -9,7 +9,7 @@ jobs: | ||||
|   stale: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/stale@v8.0.0 | ||||
|       - uses: actions/stale@v9.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ secrets.GITHUB_TOKEN }} | ||||
|           days-before-stale: 30 | ||||
|   | ||||
| @@ -3,4 +3,5 @@ ignored: | ||||
|   - DL3006 | ||||
|   - DL3013 | ||||
|   - DL3018 | ||||
|   - DL3042 | ||||
|   - SC2155 | ||||
|   | ||||
| @@ -1,34 +1,15 @@ | ||||
| repos: | ||||
|   - repo: https://github.com/psf/black | ||||
|     rev: 23.1.0 | ||||
|   - repo: https://github.com/astral-sh/ruff-pre-commit | ||||
|     rev: v0.5.7 | ||||
|     hooks: | ||||
|       - id: black | ||||
|       - id: ruff | ||||
|         args: | ||||
|           - --safe | ||||
|           - --quiet | ||||
|           - --target-version | ||||
|           - py310 | ||||
|           - --fix | ||||
|       - id: ruff-format | ||||
|         files: ^((supervisor|tests)/.+)?[^/]+\.py$ | ||||
|   - repo: https://github.com/PyCQA/flake8 | ||||
|     rev: 6.0.0 | ||||
|     hooks: | ||||
|       - id: flake8 | ||||
|         additional_dependencies: | ||||
|           - flake8-docstrings==1.7.0 | ||||
|           - pydocstyle==6.3.0 | ||||
|         files: ^(supervisor|script|tests)/.+\.py$ | ||||
|   - repo: https://github.com/pre-commit/pre-commit-hooks | ||||
|     rev: v4.3.0 | ||||
|     rev: v4.5.0 | ||||
|     hooks: | ||||
|       - id: check-executables-have-shebangs | ||||
|         stages: [manual] | ||||
|       - id: check-json | ||||
|   - repo: https://github.com/PyCQA/isort | ||||
|     rev: 5.12.0 | ||||
|     hooks: | ||||
|       - id: isort | ||||
|   - repo: https://github.com/asottile/pyupgrade | ||||
|     rev: v3.4.0 | ||||
|     hooks: | ||||
|       - id: pyupgrade | ||||
|         args: [--py310-plus] | ||||
|   | ||||
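The net effect of this hook rewrite: black, flake8 (plus its docstring plugins), isort, and pyupgrade are all replaced by the two ruff hooks, with rule selection moved into pyproject.toml. Outside pre-commit the same checks reduce to two commands; a small driver sketch (assumes ruff from requirements_tests.txt is installed):

    # Run the two ruff passes that replace black/flake8/isort/pyupgrade.
    import subprocess

    for cmd in (
        ["ruff", "check", "--fix", "supervisor", "tests"],  # lint, import order, upgrades
        ["ruff", "format", "supervisor", "tests"],          # formatting (black replacement)
    ):
        subprocess.run(cmd, check=True)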
							
								
								
									
.vscode/tasks.json (vendored, 18 lines changed)
							| @@ -58,9 +58,23 @@ | ||||
|       "problemMatcher": [] | ||||
|     }, | ||||
|     { | ||||
|       "label": "Flake8", | ||||
|       "label": "Ruff Check", | ||||
|       "type": "shell", | ||||
|       "command": "flake8 supervisor tests", | ||||
|       "command": "ruff check --fix supervisor tests", | ||||
|       "group": { | ||||
|         "kind": "test", | ||||
|         "isDefault": true | ||||
|       }, | ||||
|       "presentation": { | ||||
|         "reveal": "always", | ||||
|         "panel": "new" | ||||
|       }, | ||||
|       "problemMatcher": [] | ||||
|     }, | ||||
|     { | ||||
|       "label": "Ruff Format", | ||||
|       "type": "shell", | ||||
|       "command": "ruff format supervisor tests", | ||||
|       "group": { | ||||
|         "kind": "test", | ||||
|         "isDefault": true | ||||
|   | ||||
							
								
								
									
Dockerfile (19 lines changed)
							| @@ -4,7 +4,8 @@ FROM ${BUILD_FROM} | ||||
| ENV \ | ||||
|     S6_SERVICES_GRACETIME=10000 \ | ||||
|     SUPERVISOR_API=http://localhost \ | ||||
|     CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 | ||||
|     CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \ | ||||
|     UV_SYSTEM_PYTHON=true | ||||
|  | ||||
| ARG \ | ||||
|     COSIGN_VERSION \ | ||||
| @@ -15,6 +16,7 @@ WORKDIR /usr/src | ||||
| RUN \ | ||||
|     set -x \ | ||||
|     && apk add --no-cache \ | ||||
|         findutils \ | ||||
|         eudev \ | ||||
|         eudev-libs \ | ||||
|         git \ | ||||
| @@ -22,23 +24,26 @@ RUN \ | ||||
|         libpulse \ | ||||
|         musl \ | ||||
|         openssl \ | ||||
|         yaml \ | ||||
|     \ | ||||
|     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \ | ||||
|     && chmod a+x /usr/bin/cosign | ||||
|     && chmod a+x /usr/bin/cosign \ | ||||
|     && pip3 install uv==0.2.21 | ||||
|  | ||||
| # Install requirements | ||||
| COPY requirements.txt . | ||||
| RUN \ | ||||
|     export MAKEFLAGS="-j$(nproc)" \ | ||||
|     && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \ | ||||
|         "https://wheels.home-assistant.io/musllinux/" \ | ||||
|         -r ./requirements.txt \ | ||||
|     if [ "${BUILD_ARCH}" = "i386" ]; then \ | ||||
|         linux32 uv pip install --no-build -r requirements.txt; \ | ||||
|     else \ | ||||
|         uv pip install --no-build -r requirements.txt; \ | ||||
|     fi \ | ||||
|     && rm -f requirements.txt | ||||
|  | ||||
| # Install Home Assistant Supervisor | ||||
| COPY . supervisor | ||||
| RUN \ | ||||
|     pip3 install --no-cache-dir -e ./supervisor \ | ||||
|     pip3 install -e ./supervisor \ | ||||
|     && python3 -m compileall ./supervisor/supervisor | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -30,3 +30,5 @@ Releases are done in 3 stages (channels) with this structure: | ||||
|  | ||||
| [development]: https://developers.home-assistant.io/docs/supervisor/development | ||||
| [stable]: https://github.com/home-assistant/version/blob/master/stable.json | ||||
|  | ||||
| [](https://www.openhomefoundation.org/) | ||||
|   | ||||
							
								
								
									
build.yaml (12 lines changed)
							| @@ -1,10 +1,10 @@ | ||||
| image: ghcr.io/home-assistant/{arch}-hassio-supervisor | ||||
| build_from: | ||||
|   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16 | ||||
|   armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16 | ||||
|   armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.16 | ||||
|   amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.16 | ||||
|   i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.16 | ||||
|   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20 | ||||
|   armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20 | ||||
|   armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20 | ||||
|   amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20 | ||||
|   i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20 | ||||
| codenotary: | ||||
|   signer: notary@home-assistant.io | ||||
|   base_image: notary@home-assistant.io | ||||
| @@ -12,7 +12,7 @@ cosign: | ||||
|   base_identity: https://github.com/home-assistant/docker-base/.* | ||||
|   identity: https://github.com/home-assistant/supervisor/.* | ||||
| args: | ||||
|   COSIGN_VERSION: 2.0.2 | ||||
|   COSIGN_VERSION: 2.4.0 | ||||
| labels: | ||||
|   io.hass.type: supervisor | ||||
|   org.opencontainers.image.title: Home Assistant Supervisor | ||||
|   | ||||
							
								
								
									
pylintrc (deleted, 45 lines)
							| @@ -1,45 +0,0 @@ | ||||
| [MASTER] | ||||
| reports=no | ||||
| jobs=2 | ||||
|  | ||||
| good-names=id,i,j,k,ex,Run,_,fp,T,os | ||||
|  | ||||
| extension-pkg-whitelist= | ||||
|   ciso8601 | ||||
|  | ||||
| # Reasons disabled: | ||||
| # format - handled by black | ||||
| # locally-disabled - it spams too much | ||||
| # duplicate-code - unavoidable | ||||
| # cyclic-import - doesn't test if both import on load | ||||
| # abstract-class-not-used - is flaky, should not show up but does | ||||
| # unused-argument - generic callbacks and setup methods create a lot of warnings | ||||
| # too-many-* - are not enforced for the sake of readability | ||||
| # too-few-* - same as too-many-* | ||||
| # abstract-method - with intro of async there are always methods missing | ||||
| disable= | ||||
|   format, | ||||
|   abstract-method, | ||||
|   cyclic-import, | ||||
|   duplicate-code, | ||||
|   locally-disabled, | ||||
|   no-else-return, | ||||
|   not-context-manager, | ||||
|   too-few-public-methods, | ||||
|   too-many-arguments, | ||||
|   too-many-branches, | ||||
|   too-many-instance-attributes, | ||||
|   too-many-lines, | ||||
|   too-many-locals, | ||||
|   too-many-public-methods, | ||||
|   too-many-return-statements, | ||||
|   too-many-statements, | ||||
|   unused-argument, | ||||
|   consider-using-with | ||||
|  | ||||
| [EXCEPTIONS] | ||||
| overgeneral-exceptions=builtins.Exception | ||||
|  | ||||
|  | ||||
| [TYPECHECK] | ||||
| ignored-modules = distutils | ||||
							
								
								
									
pyproject.toml (new file, 373 lines)
							| @@ -0,0 +1,373 @@ | ||||
| [build-system] | ||||
| requires = ["setuptools~=68.0.0", "wheel~=0.40.0"] | ||||
| build-backend = "setuptools.build_meta" | ||||
|  | ||||
| [project] | ||||
| name = "Supervisor" | ||||
| dynamic = ["version", "dependencies"] | ||||
| license = { text = "Apache-2.0" } | ||||
| description = "Open-source private cloud os for Home-Assistant based on HassOS" | ||||
| readme = "README.md" | ||||
| authors = [ | ||||
|     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" }, | ||||
| ] | ||||
| keywords = ["docker", "home-assistant", "api"] | ||||
| requires-python = ">=3.12.0" | ||||
|  | ||||
| [project.urls] | ||||
| "Homepage" = "https://www.home-assistant.io/" | ||||
| "Source Code" = "https://github.com/home-assistant/supervisor" | ||||
| "Bug Reports" = "https://github.com/home-assistant/supervisor/issues" | ||||
| "Docs: Dev" = "https://developers.home-assistant.io/" | ||||
| "Discord" = "https://www.home-assistant.io/join-chat/" | ||||
| "Forum" = "https://community.home-assistant.io/" | ||||
|  | ||||
| [tool.setuptools] | ||||
| platforms = ["any"] | ||||
| zip-safe = false | ||||
| include-package-data = true | ||||
|  | ||||
| [tool.setuptools.packages.find] | ||||
| include = ["supervisor*"] | ||||
|  | ||||
| [tool.pylint.MAIN] | ||||
| py-version = "3.12" | ||||
| # Use a conservative default here; 2 should speed up most setups and not hurt | ||||
| # any too bad. Override on command line as appropriate. | ||||
| jobs = 2 | ||||
| persistent = false | ||||
| extension-pkg-allow-list = ["ciso8601"] | ||||
|  | ||||
| [tool.pylint.BASIC] | ||||
| class-const-naming-style = "any" | ||||
| good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"] | ||||
|  | ||||
| [tool.pylint."MESSAGES CONTROL"] | ||||
| # Reasons disabled: | ||||
| # format - handled by ruff | ||||
| # abstract-method - with intro of async there are always methods missing | ||||
| # cyclic-import - doesn't test if both import on load | ||||
| # duplicate-code - unavoidable | ||||
| # locally-disabled - it spams too much | ||||
| # too-many-* - are not enforced for the sake of readability | ||||
| # too-few-* - same as too-many-* | ||||
| # unused-argument - generic callbacks and setup methods create a lot of warnings | ||||
| disable = [ | ||||
|     "format", | ||||
|     "abstract-method", | ||||
|     "cyclic-import", | ||||
|     "duplicate-code", | ||||
|     "locally-disabled", | ||||
|     "no-else-return", | ||||
|     "not-context-manager", | ||||
|     "too-few-public-methods", | ||||
|     "too-many-arguments", | ||||
|     "too-many-branches", | ||||
|     "too-many-instance-attributes", | ||||
|     "too-many-lines", | ||||
|     "too-many-locals", | ||||
|     "too-many-public-methods", | ||||
|     "too-many-return-statements", | ||||
|     "too-many-statements", | ||||
|     "unused-argument", | ||||
|     "consider-using-with", | ||||
|  | ||||
|     # Handled by ruff | ||||
|     # Ref: <https://github.com/astral-sh/ruff/issues/970> | ||||
|     "await-outside-async",    # PLE1142 | ||||
|     "bad-str-strip-call",     # PLE1310 | ||||
|     "bad-string-format-type", # PLE1307 | ||||
|     "bidirectional-unicode",  # PLE2502 | ||||
|     "continue-in-finally",    # PLE0116 | ||||
|     "duplicate-bases",        # PLE0241 | ||||
|     "format-needs-mapping",   # F502 | ||||
|     "function-redefined",     # F811 | ||||
|     # Needed because ruff does not understand type of __all__ generated by a function | ||||
|     # "invalid-all-format", # PLE0605 | ||||
|     "invalid-all-object",                 # PLE0604 | ||||
|     "invalid-character-backspace",        # PLE2510 | ||||
|     "invalid-character-esc",              # PLE2513 | ||||
|     "invalid-character-nul",              # PLE2514 | ||||
|     "invalid-character-sub",              # PLE2512 | ||||
|     "invalid-character-zero-width-space", # PLE2515 | ||||
|     "logging-too-few-args",               # PLE1206 | ||||
|     "logging-too-many-args",              # PLE1205 | ||||
|     "missing-format-string-key",          # F524 | ||||
|     "mixed-format-string",                # F506 | ||||
|     "no-method-argument",                 # N805 | ||||
|     "no-self-argument",                   # N805 | ||||
|     "nonexistent-operator",               # B002 | ||||
|     "nonlocal-without-binding",           # PLE0117 | ||||
|     "not-in-loop",                        # F701, F702 | ||||
|     "notimplemented-raised",              # F901 | ||||
|     "return-in-init",                     # PLE0101 | ||||
|     "return-outside-function",            # F706 | ||||
|     "syntax-error",                       # E999 | ||||
|     "too-few-format-args",                # F524 | ||||
|     "too-many-format-args",               # F522 | ||||
|     "too-many-star-expressions",          # F622 | ||||
|     "truncated-format-string",            # F501 | ||||
|     "undefined-all-variable",             # F822 | ||||
|     "undefined-variable",                 # F821 | ||||
|     "used-prior-global-declaration",      # PLE0118 | ||||
|     "yield-inside-async-function",        # PLE1700 | ||||
|     "yield-outside-function",             # F704 | ||||
|     "anomalous-backslash-in-string",      # W605 | ||||
|     "assert-on-string-literal",           # PLW0129 | ||||
|     "assert-on-tuple",                    # F631 | ||||
|     "bad-format-string",                  # W1302, F | ||||
|     "bad-format-string-key",              # W1300, F | ||||
|     "bare-except",                        # E722 | ||||
|     "binary-op-exception",                # PLW0711 | ||||
|     "cell-var-from-loop",                 # B023 | ||||
|     # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work | ||||
|     "duplicate-except",                     # B014 | ||||
|     "duplicate-key",                        # F601 | ||||
|     "duplicate-string-formatting-argument", # F | ||||
|     "duplicate-value",                      # F | ||||
|     "eval-used",                            # PGH001 | ||||
|     "exec-used",                            # S102 | ||||
|     # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work | ||||
|     "f-string-without-interpolation",      # F541 | ||||
|     "forgotten-debug-statement",           # T100 | ||||
|     "format-string-without-interpolation", # F | ||||
|     # "global-statement", # PLW0603, ruff catches new occurrences, needs more work | ||||
|     "global-variable-not-assigned",  # PLW0602 | ||||
|     "implicit-str-concat",           # ISC001 | ||||
|     "import-self",                   # PLW0406 | ||||
|     "inconsistent-quotes",           # Q000 | ||||
|     "invalid-envvar-default",        # PLW1508 | ||||
|     "keyword-arg-before-vararg",     # B026 | ||||
|     "logging-format-interpolation",  # G | ||||
|     "logging-fstring-interpolation", # G | ||||
|     "logging-not-lazy",              # G | ||||
|     "misplaced-future",              # F404 | ||||
|     "named-expr-without-context",    # PLW0131 | ||||
|     "nested-min-max",                # PLW3301 | ||||
|     # "pointless-statement", # B018, ruff catches new occurrences, needs more work | ||||
|     "raise-missing-from", # TRY200 | ||||
|     # "redefined-builtin", # A001, ruff is way more stricter, needs work | ||||
|     "try-except-raise",               # TRY302 | ||||
|     "unused-argument",                # ARG001, we don't use it | ||||
|     "unused-format-string-argument",  #F507 | ||||
|     "unused-format-string-key",       # F504 | ||||
|     "unused-import",                  # F401 | ||||
|     "unused-variable",                # F841 | ||||
|     "useless-else-on-loop",           # PLW0120 | ||||
|     "wildcard-import",                # F403 | ||||
|     "bad-classmethod-argument",       # N804 | ||||
|     "consider-iterating-dictionary",  # SIM118 | ||||
|     "empty-docstring",                # D419 | ||||
|     "invalid-name",                   # N815 | ||||
|     "line-too-long",                  # E501, disabled globally | ||||
|     "missing-class-docstring",        # D101 | ||||
|     "missing-final-newline",          # W292 | ||||
|     "missing-function-docstring",     # D103 | ||||
|     "missing-module-docstring",       # D100 | ||||
|     "multiple-imports",               #E401 | ||||
|     "singleton-comparison",           # E711, E712 | ||||
|     "subprocess-run-check",           # PLW1510 | ||||
|     "superfluous-parens",             # UP034 | ||||
|     "ungrouped-imports",              # I001 | ||||
|     "unidiomatic-typecheck",          # E721 | ||||
|     "unnecessary-direct-lambda-call", # PLC3002 | ||||
|     "unnecessary-lambda-assignment",  # PLC3001 | ||||
|     "unneeded-not",                   # SIM208 | ||||
|     "useless-import-alias",           # PLC0414 | ||||
|     "wrong-import-order",             # I001 | ||||
|     "wrong-import-position",          # E402 | ||||
|     "comparison-of-constants",        # PLR0133 | ||||
|     "comparison-with-itself",         # PLR0124 | ||||
|     # "consider-alternative-union-syntax", # UP007, typing extension | ||||
|     "consider-merging-isinstance", # PLR1701 | ||||
|     # "consider-using-alias",              # UP006, typing extension | ||||
|     "consider-using-dict-comprehension", # C402 | ||||
|     "consider-using-generator",          # C417 | ||||
|     "consider-using-get",                # SIM401 | ||||
|     "consider-using-set-comprehension",  # C401 | ||||
|     "consider-using-sys-exit",           # PLR1722 | ||||
|     "consider-using-ternary",            # SIM108 | ||||
|     "literal-comparison",                # F632 | ||||
|     "property-with-parameters",          # PLR0206 | ||||
|     "super-with-arguments",              # UP008 | ||||
|     "too-many-branches",                 # PLR0912 | ||||
|     "too-many-return-statements",        # PLR0911 | ||||
|     "too-many-statements",               # PLR0915 | ||||
|     "trailing-comma-tuple",              # COM818 | ||||
|     "unnecessary-comprehension",         # C416 | ||||
|     "use-a-generator",                   # C417 | ||||
|     "use-dict-literal",                  # C406 | ||||
|     "use-list-literal",                  # C405 | ||||
|     "useless-object-inheritance",        # UP004 | ||||
|     "useless-return",                    # PLR1711 | ||||
|     # "no-self-use", # PLR6301  # Optional plugin, not enabled | ||||
| ] | ||||
|  | ||||
| [tool.pylint.REPORTS] | ||||
| score = false | ||||
|  | ||||
| [tool.pylint.TYPECHECK] | ||||
| ignored-modules = ["distutils"] | ||||
|  | ||||
| [tool.pylint.FORMAT] | ||||
| expected-line-ending-format = "LF" | ||||
|  | ||||
| [tool.pylint.EXCEPTIONS] | ||||
| overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] | ||||
|  | ||||
| [tool.pylint.DESIGN] | ||||
| max-positional-arguments = 10 | ||||
|  | ||||
| [tool.pytest.ini_options] | ||||
| testpaths = ["tests"] | ||||
| norecursedirs = [".git"] | ||||
| log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" | ||||
| log_date_format = "%Y-%m-%d %H:%M:%S" | ||||
| asyncio_mode = "auto" | ||||
| filterwarnings = [ | ||||
|     "error", | ||||
|     "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash", | ||||
|     "ignore::pytest.PytestUnraisableExceptionWarning", | ||||
| ] | ||||
|  | ||||
| [tool.ruff] | ||||
| lint.select = [ | ||||
|     "B002",    # Python does not support the unary prefix increment | ||||
|     "B007",    # Loop control variable {name} not used within loop body | ||||
|     "B014",    # Exception handler with duplicate exception | ||||
|     "B023",    # Function definition does not bind loop variable {name} | ||||
|     "B026",    # Star-arg unpacking after a keyword argument is strongly discouraged | ||||
|     "B904",    # Use raise from to specify exception cause | ||||
|     "C",       # complexity | ||||
|     "COM818",  # Trailing comma on bare tuple prohibited | ||||
|     "D",       # docstrings | ||||
|     "DTZ003",  # Use datetime.now(tz=) instead of datetime.utcnow() | ||||
|     "DTZ004",  # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) | ||||
|     "E",       # pycodestyle | ||||
|     "F",       # pyflakes/autoflake | ||||
|     "G",       # flake8-logging-format | ||||
|     "I",       # isort | ||||
|     "ICN001",  # import concentions; {name} should be imported as {asname} | ||||
|     "N804",    # First argument of a class method should be named cls | ||||
|     "N805",    # First argument of a method should be named self | ||||
|     "N815",    # Variable {name} in class scope should not be mixedCase | ||||
|     "PGH004",  # Use specific rule codes when using noqa | ||||
|     "PLC0414", # Useless import alias. Import alias does not rename original package. | ||||
|     "PLC",     # pylint | ||||
|     "PLE",     # pylint | ||||
|     "PLR",     # pylint | ||||
|     "PLW",     # pylint | ||||
|     "Q000",    # Double quotes found but single quotes preferred | ||||
|     "RUF006",  # Store a reference to the return value of asyncio.create_task | ||||
|     "S102",    # Use of exec detected | ||||
|     "S103",    # bad-file-permissions | ||||
|     "S108",    # hardcoded-temp-file | ||||
|     "S306",    # suspicious-mktemp-usage | ||||
|     "S307",    # suspicious-eval-usage | ||||
|     "S313",    # suspicious-xmlc-element-tree-usage | ||||
|     "S314",    # suspicious-xml-element-tree-usage | ||||
|     "S315",    # suspicious-xml-expat-reader-usage | ||||
|     "S316",    # suspicious-xml-expat-builder-usage | ||||
|     "S317",    # suspicious-xml-sax-usage | ||||
|     "S318",    # suspicious-xml-mini-dom-usage | ||||
|     "S319",    # suspicious-xml-pull-dom-usage | ||||
|     "S320",    # suspicious-xmle-tree-usage | ||||
|     "S601",    # paramiko-call | ||||
|     "S602",    # subprocess-popen-with-shell-equals-true | ||||
|     "S604",    # call-with-shell-equals-true | ||||
|     "S608",    # hardcoded-sql-expression | ||||
|     "S609",    # unix-command-wildcard-injection | ||||
|     "SIM105",  # Use contextlib.suppress({exception}) instead of try-except-pass | ||||
|     "SIM117",  # Merge with-statements that use the same scope | ||||
|     "SIM118",  # Use {key} in {dict} instead of {key} in {dict}.keys() | ||||
|     "SIM201",  # Use {left} != {right} instead of not {left} == {right} | ||||
|     "SIM208",  # Use {expr} instead of not (not {expr}) | ||||
|     "SIM212",  # Use {a} if {a} else {b} instead of {b} if not {a} else {a} | ||||
|     "SIM300",  # Yoda conditions. Use 'age == 42' instead of '42 == age'. | ||||
|     "SIM401",  # Use get from dict with default instead of an if block | ||||
|     "T100",    # Trace found: {name} used | ||||
|     "T20",     # flake8-print | ||||
|     "TID251",  # Banned imports | ||||
|     "TRY004",  # Prefer TypeError exception for invalid type | ||||
|     "TRY302",  # Remove exception handler; error is immediately re-raised | ||||
|     "UP",      # pyupgrade | ||||
|     "W",       # pycodestyle | ||||
| ] | ||||
|  | ||||
| lint.ignore = [ | ||||
|     "D202", # No blank lines allowed after function docstring | ||||
|     "D203", # 1 blank line required before class docstring | ||||
|     "D213", # Multi-line docstring summary should start at the second line | ||||
|     "D406", # Section name should end with a newline | ||||
|     "D407", # Section name underlining | ||||
|     "E501", # line too long | ||||
|     "E731", # do not assign a lambda expression, use a def | ||||
|  | ||||
|     # Ignore ignored, as the rule is now back in preview/nursery, which cannot | ||||
|     # be ignored anymore without warnings. | ||||
|     # https://github.com/astral-sh/ruff/issues/7491 | ||||
|     # "PLC1901", # Lots of false positives | ||||
|  | ||||
|     # False positives https://github.com/astral-sh/ruff/issues/5386 | ||||
|     "PLC0208", # Use a sequence type instead of a `set` when iterating over values | ||||
|     "PLR0911", # Too many return statements ({returns} > {max_returns}) | ||||
|     "PLR0912", # Too many branches ({branches} > {max_branches}) | ||||
|     "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) | ||||
|     "PLR0915", # Too many statements ({statements} > {max_statements}) | ||||
|     "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable | ||||
|     "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target | ||||
|     "UP006",   # keep type annotation style as is | ||||
|     "UP007",   # keep type annotation style as is | ||||
|     # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 | ||||
|     "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` | ||||
|  | ||||
|     # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules | ||||
|     "W191", | ||||
|     "E111", | ||||
|     "E114", | ||||
|     "E117", | ||||
|     "D206", | ||||
|     "D300", | ||||
|     "Q000", | ||||
|     "Q001", | ||||
|     "Q002", | ||||
|     "Q003", | ||||
|     "COM812", | ||||
|     "COM819", | ||||
|     "ISC001", | ||||
|     "ISC002", | ||||
|  | ||||
|     # Disabled because ruff does not understand type of __all__ generated by a function | ||||
|     "PLE0605", | ||||
| ] | ||||
|  | ||||
| [tool.ruff.lint.flake8-import-conventions.extend-aliases] | ||||
| voluptuous = "vol" | ||||
|  | ||||
| [tool.ruff.lint.flake8-pytest-style] | ||||
| fixture-parentheses = false | ||||
|  | ||||
| [tool.ruff.lint.flake8-tidy-imports.banned-api] | ||||
| "pytz".msg = "use zoneinfo instead" | ||||
|  | ||||
| [tool.ruff.lint.isort] | ||||
| force-sort-within-sections = true | ||||
| section-order = [ | ||||
|     "future", | ||||
|     "standard-library", | ||||
|     "third-party", | ||||
|     "first-party", | ||||
|     "local-folder", | ||||
| ] | ||||
| forced-separate = ["tests"] | ||||
| known-first-party = ["supervisor", "tests"] | ||||
| combine-as-imports = true | ||||
| split-on-trailing-comma = false | ||||
|  | ||||
| [tool.ruff.lint.per-file-ignores] | ||||
|  | ||||
| # DBus Service Mocks must use typing and names understood by dbus-fast | ||||
| "tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"] | ||||
|  | ||||
| [tool.ruff.lint.mccabe] | ||||
| max-complexity = 25 | ||||
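Two of the selected rule groups are easy to see in action: DTZ003 pushes datetime.now(tz=...) over utcnow(), and the TID251 banned-API table rejects any pytz import with the configured "use zoneinfo instead" message. The compliant pattern looks like this:

    # Code that satisfies DTZ003 and the pytz ban (TID251) configured above.
    from datetime import datetime
    from zoneinfo import ZoneInfo  # stdlib replacement for pytz

    now_utc = datetime.now(tz=ZoneInfo("UTC"))  # not datetime.utcnow()
    print(now_utc.isoformat())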
| @@ -1,2 +0,0 @@ | ||||
| [pytest] | ||||
| asyncio_mode = auto | ||||
| @@ -1,26 +1,29 @@ | ||||
| aiodns==3.0.0 | ||||
| aiohttp==3.8.4 | ||||
| async_timeout==4.0.2 | ||||
| aiodns==3.2.0 | ||||
| aiohttp==3.10.10 | ||||
| atomicwrites-homeassistant==1.4.1 | ||||
| attrs==23.1.0 | ||||
| awesomeversion==23.5.0 | ||||
| brotli==1.0.9 | ||||
| ciso8601==2.3.0 | ||||
| colorlog==6.7.0 | ||||
| cpe==1.2.1 | ||||
| cryptography==41.0.2 | ||||
| debugpy==1.6.7 | ||||
| deepmerge==1.1.0 | ||||
| dirhash==0.2.1 | ||||
| docker==6.1.3 | ||||
| faust-cchardet==2.1.18 | ||||
| gitpython==3.1.32 | ||||
| jinja2==3.1.2 | ||||
| pulsectl==23.5.2 | ||||
| pyudev==0.24.1 | ||||
| ruamel.yaml==0.17.21 | ||||
| securetar==2023.3.0 | ||||
| sentry-sdk==1.29.0 | ||||
| voluptuous==0.13.1 | ||||
| dbus-fast==1.86.0 | ||||
| typing_extensions==4.7.1 | ||||
| attrs==24.2.0 | ||||
| awesomeversion==24.6.0 | ||||
| brotli==1.1.0 | ||||
| ciso8601==2.3.1 | ||||
| colorlog==6.8.2 | ||||
| cpe==1.3.1 | ||||
| cryptography==43.0.1 | ||||
| debugpy==1.8.7 | ||||
| deepmerge==2.0 | ||||
| dirhash==0.5.0 | ||||
| docker==7.1.0 | ||||
| faust-cchardet==2.1.19 | ||||
| gitpython==3.1.43 | ||||
| jinja2==3.1.4 | ||||
| orjson==3.10.7 | ||||
| pulsectl==24.8.0 | ||||
| pyudev==0.24.3 | ||||
| PyYAML==6.0.2 | ||||
| requests==2.32.3 | ||||
| securetar==2024.2.1 | ||||
| sentry-sdk==2.16.0 | ||||
| setuptools==75.1.0 | ||||
| voluptuous==0.15.2 | ||||
| dbus-fast==2.24.3 | ||||
| typing_extensions==4.12.2 | ||||
| zlib-fast==0.2.0 | ||||
|   | ||||
| @@ -1,16 +1,12 @@ | ||||
| black==23.7.0 | ||||
| coverage==7.2.7 | ||||
| flake8-docstrings==1.7.0 | ||||
| flake8==6.1.0 | ||||
| pre-commit==3.3.3 | ||||
| pydocstyle==6.3.0 | ||||
| pylint==2.17.5 | ||||
| pytest-aiohttp==1.0.4 | ||||
| pytest-asyncio==0.18.3 | ||||
| pytest-cov==4.1.0 | ||||
| pytest-timeout==2.1.0 | ||||
| pytest==7.4.0 | ||||
| pyupgrade==3.10.1 | ||||
| time-machine==2.11.0 | ||||
| typing_extensions==4.7.1 | ||||
| urllib3==2.0.4 | ||||
| coverage==7.6.3 | ||||
| pre-commit==4.0.1 | ||||
| pylint==3.3.1 | ||||
| pytest-aiohttp==1.0.5 | ||||
| pytest-asyncio==0.23.6 | ||||
| pytest-cov==5.0.0 | ||||
| pytest-timeout==2.3.1 | ||||
| pytest==8.3.3 | ||||
| ruff==0.6.9 | ||||
| time-machine==2.16.0 | ||||
| typing_extensions==4.12.2 | ||||
| urllib3==2.2.3 | ||||
|   | ||||
| @@ -15,7 +15,7 @@ do | ||||
|     if [[ "${supervisor_state}" = "running"  ]]; then | ||||
|  | ||||
|         # Check API | ||||
|         if bashio::supervisor.ping; then | ||||
|         if bashio::supervisor.ping > /dev/null; then | ||||
|             failed_count=0 | ||||
|         else | ||||
|             bashio::log.warning "Maybe found an issue on API healthy" | ||||
|   | ||||
							
								
								
									
setup.cfg (deleted, 31 lines)
							| @@ -1,31 +0,0 @@ | ||||
| [isort] | ||||
| multi_line_output = 3 | ||||
| include_trailing_comma=True | ||||
| force_grid_wrap=0 | ||||
| line_length=88 | ||||
| indent = "    " | ||||
| force_sort_within_sections = true | ||||
| sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER | ||||
| default_section = THIRDPARTY | ||||
| forced_separate = tests | ||||
| combine_as_imports = true | ||||
| use_parentheses = true | ||||
| known_first_party = supervisor,tests | ||||
|  | ||||
| [flake8] | ||||
| exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build | ||||
| doctests = True | ||||
| max-line-length = 88 | ||||
| # E501: line too long | ||||
| # W503: Line break occurred before a binary operator | ||||
| # E203: Whitespace before ':' | ||||
| # D202 No blank lines allowed after function docstring | ||||
| # W504 line break after binary operator | ||||
| ignore = | ||||
|     E501, | ||||
|     W503, | ||||
|     E203, | ||||
|     D202, | ||||
|     W504 | ||||
| per-file-ignores = | ||||
|     tests/dbus_service_mocks/*.py: F821,F722 | ||||
							
								
								
									
setup.py (76 lines changed)
							| @@ -1,60 +1,28 @@ | ||||
| """Home Assistant Supervisor setup.""" | ||||
|  | ||||
| from pathlib import Path | ||||
| import re | ||||
|  | ||||
| from setuptools import setup | ||||
|  | ||||
| from supervisor.const import SUPERVISOR_VERSION | ||||
| RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$") | ||||
|  | ||||
| SUPERVISOR_DIR = Path(__file__).parent | ||||
| REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt" | ||||
| CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py" | ||||
|  | ||||
| REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8") | ||||
| CONSTANTS = CONST_FILE.read_text(encoding="utf-8") | ||||
|  | ||||
|  | ||||
| def _get_supervisor_version(): | ||||
|     for line in CONSTANTS.split("\n"): | ||||
|         if match := RE_SUPERVISOR_VERSION.match(line): | ||||
|             return match.group(1) | ||||
|     return "99.9.9dev" | ||||
|  | ||||
|  | ||||
| setup( | ||||
|     name="Supervisor", | ||||
|     version=SUPERVISOR_VERSION, | ||||
|     license="BSD License", | ||||
|     author="The Home Assistant Authors", | ||||
|     author_email="hello@home-assistant.io", | ||||
|     url="https://home-assistant.io/", | ||||
|     description=("Open-source private cloud os for Home-Assistant" " based on HassOS"), | ||||
|     long_description=( | ||||
|         "A maintainless private cloud operator system that" | ||||
|         "setup a Home-Assistant instance. Based on HassOS" | ||||
|     ), | ||||
|     classifiers=[ | ||||
|         "Intended Audience :: End Users/Desktop", | ||||
|         "Intended Audience :: Developers", | ||||
|         "License :: OSI Approved :: Apache Software License", | ||||
|         "Operating System :: OS Independent", | ||||
|         "Topic :: Home Automation", | ||||
|         "Topic :: Software Development :: Libraries :: Python Modules", | ||||
|         "Topic :: Scientific/Engineering :: Atmospheric Science", | ||||
|         "Development Status :: 5 - Production/Stable", | ||||
|         "Intended Audience :: Developers", | ||||
|         "Programming Language :: Python :: 3.8", | ||||
|     ], | ||||
|     keywords=["docker", "home-assistant", "api"], | ||||
|     zip_safe=False, | ||||
|     platforms="any", | ||||
|     packages=[ | ||||
|         "supervisor.addons", | ||||
|         "supervisor.api", | ||||
|         "supervisor.backups", | ||||
|         "supervisor.dbus.network", | ||||
|         "supervisor.dbus.network.setting", | ||||
|         "supervisor.dbus", | ||||
|         "supervisor.discovery.services", | ||||
|         "supervisor.discovery", | ||||
|         "supervisor.docker", | ||||
|         "supervisor.homeassistant", | ||||
|         "supervisor.host", | ||||
|         "supervisor.jobs", | ||||
|         "supervisor.misc", | ||||
|         "supervisor.plugins", | ||||
|         "supervisor.resolution.checks", | ||||
|         "supervisor.resolution.evaluations", | ||||
|         "supervisor.resolution.fixups", | ||||
|         "supervisor.resolution", | ||||
|         "supervisor.security", | ||||
|         "supervisor.services.modules", | ||||
|         "supervisor.services", | ||||
|         "supervisor.store", | ||||
|         "supervisor.utils", | ||||
|         "supervisor", | ||||
|     ], | ||||
|     include_package_data=True, | ||||
|     version=_get_supervisor_version(), | ||||
|     install_requires=REQUIREMENTS.split("\n"), | ||||
| ) | ||||
|   | ||||
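With the split corrected to "\n" (see the fix above), _get_supervisor_version scans const.py line by line without importing the package at build time, falling back to the dev placeholder when no match is found. An equivalent single-pass variant using re.MULTILINE, run against a made-up const.py body:

    # Hypothetical const.py content; the real file defines SUPERVISOR_VERSION.
    import re

    fake_const = 'NAME = "supervisor"\nSUPERVISOR_VERSION = "2024.10.0"\n'
    match = re.search(r"^SUPERVISOR_VERSION =\s*(.+)$", fake_const, re.MULTILINE)
    version = match.group(1).strip('"') if match else "99.9.9dev"
    print(version)  # 2024.10.0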
| @@ -1,11 +1,20 @@ | ||||
| """Main file for Supervisor.""" | ||||
|  | ||||
| import asyncio | ||||
| from concurrent.futures import ThreadPoolExecutor | ||||
| import logging | ||||
| from pathlib import Path | ||||
| import sys | ||||
|  | ||||
| from supervisor import bootstrap | ||||
| import zlib_fast | ||||
|  | ||||
| # Enable fast zlib before importing supervisor | ||||
| zlib_fast.enable() | ||||
|  | ||||
| from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402 | ||||
| from supervisor.utils.logging import (  # pylint: disable=wrong-import-position  # noqa: E402 | ||||
|     activate_log_queue_handler, | ||||
| ) | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -38,6 +47,8 @@ if __name__ == "__main__": | ||||
|     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker") | ||||
|     loop.set_default_executor(executor) | ||||
|  | ||||
|     activate_log_queue_handler() | ||||
|  | ||||
|     _LOGGER.info("Initializing Supervisor setup") | ||||
|     coresys = loop.run_until_complete(bootstrap.initialize_coresys()) | ||||
|     loop.set_debug(coresys.config.debug) | ||||
|   | ||||
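Two ordering details in this file matter: zlib_fast.enable() must run before anything imports supervisor (hence the pylint/noqa markers on the late imports), and activate_log_queue_handler() moves log I/O off the event-loop thread. A generic stdlib sketch of the queue-handler idea; this illustrates the pattern, not Supervisor's actual implementation:

    # Minimal queue-based logging: callers enqueue records (non-blocking),
    # a listener thread does the actual I/O.
    import logging
    import logging.handlers
    import queue

    log_queue: queue.SimpleQueue = queue.SimpleQueue()
    listener = logging.handlers.QueueListener(log_queue, logging.StreamHandler())
    logging.getLogger().addHandler(logging.handlers.QueueHandler(log_queue))
    listener.start()

    logging.getLogger(__name__).warning("logged via the queue")
    listener.stop()  # flush and join the listener thread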
| @@ -1,462 +1 @@ | ||||
| """Init file for Supervisor add-ons.""" | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from contextlib import suppress | ||||
| import logging | ||||
| import tarfile | ||||
| from typing import Union | ||||
|  | ||||
| from ..const import AddonBoot, AddonStartup, AddonState | ||||
| from ..coresys import CoreSys, CoreSysAttributes | ||||
| from ..exceptions import ( | ||||
|     AddonConfigurationError, | ||||
|     AddonsError, | ||||
|     AddonsJobError, | ||||
|     AddonsNotSupportedError, | ||||
|     CoreDNSError, | ||||
|     DockerAPIError, | ||||
|     DockerError, | ||||
|     DockerNotFound, | ||||
|     HomeAssistantAPIError, | ||||
|     HostAppArmorError, | ||||
| ) | ||||
| from ..jobs.decorator import Job, JobCondition | ||||
| from ..resolution.const import ContextType, IssueType, SuggestionType | ||||
| from ..store.addon import AddonStore | ||||
| from ..utils import check_exception_chain | ||||
| from ..utils.sentry import capture_exception | ||||
| from .addon import Addon | ||||
| from .const import ADDON_UPDATE_CONDITIONS | ||||
| from .data import AddonsData | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| AnyAddon = Union[Addon, AddonStore] | ||||
|  | ||||
|  | ||||
| class AddonManager(CoreSysAttributes): | ||||
|     """Manage add-ons inside Supervisor.""" | ||||
|  | ||||
|     def __init__(self, coresys: CoreSys): | ||||
|         """Initialize Docker base wrapper.""" | ||||
|         self.coresys: CoreSys = coresys | ||||
|         self.data: AddonsData = AddonsData(coresys) | ||||
|         self.local: dict[str, Addon] = {} | ||||
|         self.store: dict[str, AddonStore] = {} | ||||
|  | ||||
|     @property | ||||
|     def all(self) -> list[AnyAddon]: | ||||
|         """Return a list of all add-ons.""" | ||||
|         addons: dict[str, AnyAddon] = {**self.store, **self.local} | ||||
|         return list(addons.values()) | ||||
|  | ||||
|     @property | ||||
|     def installed(self) -> list[Addon]: | ||||
|         """Return a list of all installed add-ons.""" | ||||
|         return list(self.local.values()) | ||||
|  | ||||
|     def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: | ||||
|         """Return an add-on from slug. | ||||
|  | ||||
|         Prio: | ||||
|           1 - Local | ||||
|           2 - Store | ||||
|         """ | ||||
|         if addon_slug in self.local: | ||||
|             return self.local[addon_slug] | ||||
|         if not local_only: | ||||
|             return self.store.get(addon_slug) | ||||
|         return None | ||||
|  | ||||
|     def from_token(self, token: str) -> Addon | None: | ||||
|         """Return an add-on from Supervisor token.""" | ||||
|         for addon in self.installed: | ||||
|             if token == addon.supervisor_token: | ||||
|                 return addon | ||||
|         return None | ||||
|  | ||||
|     async def load(self) -> None: | ||||
|         """Start up add-on management.""" | ||||
|         tasks = [] | ||||
|         for slug in self.data.system: | ||||
|             addon = self.local[slug] = Addon(self.coresys, slug) | ||||
|             tasks.append(self.sys_create_task(addon.load())) | ||||
|  | ||||
|         # Run initial tasks | ||||
|         _LOGGER.info("Found %d installed add-ons", len(tasks)) | ||||
|         if tasks: | ||||
|             await asyncio.wait(tasks) | ||||
|  | ||||
|         # Sync DNS | ||||
|         await self.sync_dns() | ||||
|  | ||||
|     async def boot(self, stage: AddonStartup) -> None: | ||||
|         """Boot add-ons with mode auto.""" | ||||
|         tasks: list[Addon] = [] | ||||
|         for addon in self.installed: | ||||
|             if addon.boot != AddonBoot.AUTO or addon.startup != stage: | ||||
|                 continue | ||||
|             tasks.append(addon) | ||||
|  | ||||
|         # Evaluate add-ons which need to be started | ||||
|         _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks)) | ||||
|         if not tasks: | ||||
|             return | ||||
|  | ||||
|         # Start Add-ons sequential | ||||
|         # avoid issue on slow IO | ||||
|         # Config.wait_boot is deprecated. Until addons update with healthchecks, | ||||
|         # add a sleep task for it to keep the same minimum amount of wait time | ||||
|         wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] | ||||
|         for addon in tasks: | ||||
|             try: | ||||
|                 if start_task := await addon.start(): | ||||
|                     wait_boot.append(start_task) | ||||
|             except AddonsError as err: | ||||
|                 # Check if there is a system/user issue | ||||
|                 if check_exception_chain( | ||||
|                     err, (DockerAPIError, DockerNotFound, AddonConfigurationError) | ||||
|                 ): | ||||
|                     addon.boot = AddonBoot.MANUAL | ||||
|                     addon.save_persist() | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 capture_exception(err) | ||||
|             else: | ||||
|                 continue | ||||
|  | ||||
|             _LOGGER.warning("Can't start Add-on %s", addon.slug) | ||||
|  | ||||
|         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere | ||||
|         await asyncio.gather(*wait_boot, return_exceptions=True) | ||||
|  | ||||
|     async def shutdown(self, stage: AddonStartup) -> None: | ||||
|         """Shutdown addons.""" | ||||
|         tasks: list[Addon] = [] | ||||
|         for addon in self.installed: | ||||
|             if addon.state != AddonState.STARTED or addon.startup != stage: | ||||
|                 continue | ||||
|             tasks.append(addon) | ||||
|  | ||||
|         # Evaluate add-ons which need to be stopped | ||||
|         _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks)) | ||||
|         if not tasks: | ||||
|             return | ||||
|  | ||||
|         # Stop Add-ons sequential | ||||
|         # avoid issue on slow IO | ||||
|         for addon in tasks: | ||||
|             try: | ||||
|                 await addon.stop() | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) | ||||
|                 capture_exception(err) | ||||
|  | ||||
|     @Job( | ||||
|         conditions=ADDON_UPDATE_CONDITIONS, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def install(self, slug: str) -> None: | ||||
|         """Install an add-on.""" | ||||
|         if slug in self.local: | ||||
|             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning) | ||||
|         store = self.store.get(slug) | ||||
|  | ||||
|         if not store: | ||||
|             raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error) | ||||
|  | ||||
|         store.validate_availability() | ||||
|  | ||||
|         self.data.install(store) | ||||
|         addon = Addon(self.coresys, slug) | ||||
|         await addon.load() | ||||
|  | ||||
|         if not addon.path_data.is_dir(): | ||||
|             _LOGGER.info( | ||||
|                 "Creating Home Assistant add-on data folder %s", addon.path_data | ||||
|             ) | ||||
|             addon.path_data.mkdir() | ||||
|  | ||||
|         # Setup/Fix AppArmor profile | ||||
|         await addon.install_apparmor() | ||||
|  | ||||
|         try: | ||||
|             await addon.instance.install(store.version, store.image, arch=addon.arch) | ||||
|         except DockerError as err: | ||||
|             self.data.uninstall(addon) | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         self.local[slug] = addon | ||||
|  | ||||
|         # Reload ingress tokens | ||||
|         if addon.with_ingress: | ||||
|             await self.sys_ingress.reload() | ||||
|  | ||||
|         _LOGGER.info("Add-on '%s' successfully installed", slug) | ||||
|  | ||||
|     async def uninstall(self, slug: str) -> None: | ||||
|         """Remove an add-on.""" | ||||
|         if slug not in self.local: | ||||
|             _LOGGER.warning("Add-on %s is not installed", slug) | ||||
|             return | ||||
|         addon = self.local[slug] | ||||
|  | ||||
|         try: | ||||
|             await addon.instance.remove() | ||||
|         except DockerError as err: | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         addon.state = AddonState.UNKNOWN | ||||
|  | ||||
|         await addon.unload() | ||||
|  | ||||
|         # Cleanup audio settings | ||||
|         if addon.path_pulse.exists(): | ||||
|             with suppress(OSError): | ||||
|                 addon.path_pulse.unlink() | ||||
|  | ||||
|         # Cleanup AppArmor profile | ||||
|         with suppress(HostAppArmorError): | ||||
|             await addon.uninstall_apparmor() | ||||
|  | ||||
|         # Cleanup Ingress panel from sidebar | ||||
|         if addon.ingress_panel: | ||||
|             addon.ingress_panel = False | ||||
|             with suppress(HomeAssistantAPIError): | ||||
|                 await self.sys_ingress.update_hass_panel(addon) | ||||
|  | ||||
|         # Cleanup Ingress dynamic port assignment | ||||
|         if addon.with_ingress: | ||||
|             self.sys_create_task(self.sys_ingress.reload()) | ||||
|             self.sys_ingress.del_dynamic_port(slug) | ||||
|  | ||||
|         # Cleanup discovery data | ||||
|         for message in self.sys_discovery.list_messages: | ||||
|             if message.addon != addon.slug: | ||||
|                 continue | ||||
|             self.sys_discovery.remove(message) | ||||
|  | ||||
|         # Cleanup services data | ||||
|         for service in self.sys_services.list_services: | ||||
|             if addon.slug not in service.active: | ||||
|                 continue | ||||
|             service.del_service_data(addon) | ||||
|  | ||||
|         self.data.uninstall(addon) | ||||
|         self.local.pop(slug) | ||||
|  | ||||
|         _LOGGER.info("Add-on '%s' successfully removed", slug) | ||||
|  | ||||
|     @Job( | ||||
|         conditions=ADDON_UPDATE_CONDITIONS, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def update( | ||||
|         self, slug: str, backup: bool | None = False | ||||
|     ) -> Awaitable[None] | None: | ||||
|         """Update add-on. | ||||
|  | ||||
|         Returns a coroutine that completes when addon has state 'started' (see addon.start) | ||||
|         if addon is started after update. Else nothing is returned. | ||||
|         """ | ||||
|         if slug not in self.local: | ||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||
|         addon = self.local[slug] | ||||
|  | ||||
|         if addon.is_detached: | ||||
|             raise AddonsError( | ||||
|                 f"Add-on {slug} is not available inside store", _LOGGER.error | ||||
|             ) | ||||
|         store = self.store[slug] | ||||
|  | ||||
|         if addon.version == store.version: | ||||
|             raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning) | ||||
|  | ||||
|         # Check if still available; maybe something has changed | ||||
|         store.validate_availability() | ||||
|  | ||||
|         if backup: | ||||
|             await self.sys_backups.do_backup_partial( | ||||
|                 name=f"addon_{addon.slug}_{addon.version}", | ||||
|                 homeassistant=False, | ||||
|                 addons=[addon.slug], | ||||
|             ) | ||||
|  | ||||
|         # Update instance | ||||
|         last_state: AddonState = addon.state | ||||
|         old_image = addon.image | ||||
|         try: | ||||
|             await addon.instance.update(store.version, store.image) | ||||
|         except DockerError as err: | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         _LOGGER.info("Add-on '%s' successfully updated", slug) | ||||
|         self.data.update(store) | ||||
|  | ||||
|         # Cleanup | ||||
|         with suppress(DockerError): | ||||
|             await addon.instance.cleanup(old_image=old_image) | ||||
|  | ||||
|         # Setup/Fix AppArmor profile | ||||
|         await addon.install_apparmor() | ||||
|  | ||||
|         # restore state | ||||
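|         # (the start task is returned so callers can await the 'started' state separately) | ||||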
|         return ( | ||||
|             await addon.start() | ||||
|             if last_state in [AddonState.STARTED, AddonState.STARTUP] | ||||
|             else None | ||||
|         ) | ||||
|  | ||||
|     @Job( | ||||
|         conditions=[ | ||||
|             JobCondition.FREE_SPACE, | ||||
|             JobCondition.INTERNET_HOST, | ||||
|             JobCondition.HEALTHY, | ||||
|         ], | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def rebuild(self, slug: str) -> Awaitable[None] | None: | ||||
|         """Perform a rebuild of local build add-on. | ||||
|  | ||||
|         Returns a coroutine that completes when addon has state 'started' (see addon.start) | ||||
|         if addon is started after rebuild. Else nothing is returned. | ||||
|         """ | ||||
|         if slug not in self.local: | ||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||
|         addon = self.local[slug] | ||||
|  | ||||
|         if addon.is_detached: | ||||
|             raise AddonsError( | ||||
|                 f"Add-on {slug} is not available inside store", _LOGGER.error | ||||
|             ) | ||||
|         store = self.store[slug] | ||||
|  | ||||
|         # Check if a rebuild is possible now | ||||
|         if addon.version != store.version: | ||||
|             raise AddonsError( | ||||
|                 "Version changed, use Update instead of Rebuild", _LOGGER.error | ||||
|             ) | ||||
|         if not addon.need_build: | ||||
|             raise AddonsNotSupportedError( | ||||
|                 "Can't rebuild an image-based add-on", _LOGGER.error | ||||
|             ) | ||||
|  | ||||
|         # remove docker container but not addon config | ||||
|         last_state: AddonState = addon.state | ||||
|         try: | ||||
|             await addon.instance.remove() | ||||
|             await addon.instance.install(addon.version) | ||||
|         except DockerError as err: | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         self.data.update(store) | ||||
|         _LOGGER.info("Add-on '%s' successfully rebuilt", slug) | ||||
|  | ||||
|         # restore state | ||||
|         return ( | ||||
|             await addon.start() | ||||
|             if last_state in [AddonState.STARTED, AddonState.STARTUP] | ||||
|             else None | ||||
|         ) | ||||
|  | ||||
|     @Job( | ||||
|         conditions=[ | ||||
|             JobCondition.FREE_SPACE, | ||||
|             JobCondition.INTERNET_HOST, | ||||
|             JobCondition.HEALTHY, | ||||
|         ], | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def restore( | ||||
|         self, slug: str, tar_file: tarfile.TarFile | ||||
|     ) -> Awaitable[None] | None: | ||||
|         """Restore state of an add-on. | ||||
|  | ||||
|         Returns a coroutine that completes when addon has state 'started' (see addon.start) | ||||
|         if addon is started after restore. Else nothing is returned. | ||||
|         """ | ||||
|         if slug not in self.local: | ||||
|             _LOGGER.debug("Add-on %s is not locally available for restore", slug) | ||||
|             addon = Addon(self.coresys, slug) | ||||
|         else: | ||||
|             _LOGGER.debug("Add-on %s is locally available for restore", slug) | ||||
|             addon = self.local[slug] | ||||
|  | ||||
|         wait_for_start = await addon.restore(tar_file) | ||||
|  | ||||
|         # Check if new | ||||
|         if slug not in self.local: | ||||
|             _LOGGER.info("Detected new add-on after restore: %s", slug) | ||||
|             self.local[slug] = addon | ||||
|  | ||||
|         # Update ingress | ||||
|         if addon.with_ingress: | ||||
|             await self.sys_ingress.reload() | ||||
|             with suppress(HomeAssistantAPIError): | ||||
|                 await self.sys_ingress.update_hass_panel(addon) | ||||
|  | ||||
|         return wait_for_start | ||||
|  | ||||
|     @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST]) | ||||
|     async def repair(self) -> None: | ||||
|         """Repair local add-ons.""" | ||||
|         needs_repair: list[Addon] = [] | ||||
|  | ||||
|         # Evaluate Add-ons to repair | ||||
|         for addon in self.installed: | ||||
|             if await addon.instance.exists(): | ||||
|                 continue | ||||
|             needs_repair.append(addon) | ||||
|  | ||||
|         _LOGGER.info("Found %d add-ons to repair", len(needs_repair)) | ||||
|         if not needs_repair: | ||||
|             return | ||||
|  | ||||
|         for addon in needs_repair: | ||||
|             _LOGGER.info("Repairing add-on: %s", addon.slug) | ||||
|             with suppress(DockerError, KeyError): | ||||
|                 # Need to pull the image again | ||||
|                 if not addon.need_build: | ||||
|                     await addon.instance.install(addon.version, addon.image) | ||||
|                     continue | ||||
|  | ||||
|                 # Need local lookup | ||||
|                 if addon.need_build and not addon.is_detached: | ||||
|                     store = self.store[addon.slug] | ||||
|                     # If this add-on is available for rebuild | ||||
|                     if addon.version == store.version: | ||||
|                         await addon.instance.install(addon.version, addon.image) | ||||
|                         continue | ||||
|  | ||||
|             _LOGGER.error("Can't repair %s", addon.slug) | ||||
|             with suppress(AddonsError): | ||||
|                 await self.uninstall(addon.slug) | ||||
|  | ||||
|     async def sync_dns(self) -> None: | ||||
|         """Sync add-ons DNS names.""" | ||||
|         # Update hosts | ||||
|         add_host_coros: list[Awaitable[None]] = [] | ||||
|         for addon in self.installed: | ||||
|             try: | ||||
|                 if not await addon.instance.is_running(): | ||||
|                     continue | ||||
|             except DockerError as err: | ||||
|                 _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err) | ||||
|                 self.sys_resolution.create_issue( | ||||
|                     IssueType.CORRUPT_DOCKER, | ||||
|                     ContextType.ADDON, | ||||
|                     reference=addon.slug, | ||||
|                     suggestions=[SuggestionType.EXECUTE_REPAIR], | ||||
|                 ) | ||||
|                 capture_exception(err) | ||||
|             else: | ||||
|                 add_host_coros.append( | ||||
|                     self.sys_plugins.dns.add_host( | ||||
|                         ipv4=addon.ip_address, names=[addon.hostname], write=False | ||||
|                     ) | ||||
|                 ) | ||||
|  | ||||
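|         # add_host() was called with write=False; the hosts file is flushed once below. | ||||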
|         await asyncio.gather(*add_host_coros) | ||||
|  | ||||
|         # Write hosts files | ||||
|         with suppress(CoreDNSError): | ||||
|             await self.sys_plugins.dns.write_hosts() | ||||
|   | ||||
| @@ -1,8 +1,11 @@ | ||||
| """Init file for Supervisor add-ons.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from contextlib import suppress | ||||
| from copy import deepcopy | ||||
| from datetime import datetime | ||||
| import errno | ||||
| from ipaddress import IPv4Address | ||||
| import logging | ||||
| from pathlib import Path, PurePath | ||||
| @@ -14,11 +17,14 @@ from tempfile import TemporaryDirectory | ||||
| from typing import Any, Final | ||||
|  | ||||
| import aiohttp | ||||
| from awesomeversion import AwesomeVersionCompareException | ||||
| from deepmerge import Merger | ||||
| from securetar import atomic_contents_add, secure_path | ||||
| import voluptuous as vol | ||||
| from voluptuous.humanize import humanize_error | ||||
|  | ||||
| from supervisor.utils.dt import utc_from_timestamp | ||||
|  | ||||
| from ..bus import EventListener | ||||
| from ..const import ( | ||||
|     ATTR_ACCESS_TOKEN, | ||||
| @@ -41,13 +47,17 @@ from ..const import ( | ||||
|     ATTR_SLUG, | ||||
|     ATTR_STATE, | ||||
|     ATTR_SYSTEM, | ||||
|     ATTR_SYSTEM_MANAGED, | ||||
|     ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, | ||||
|     ATTR_TYPE, | ||||
|     ATTR_USER, | ||||
|     ATTR_UUID, | ||||
|     ATTR_VERSION, | ||||
|     ATTR_VERSION_TIMESTAMP, | ||||
|     ATTR_WATCHDOG, | ||||
|     DNS_SUFFIX, | ||||
|     AddonBoot, | ||||
|     AddonBootConfig, | ||||
|     AddonStartup, | ||||
|     AddonState, | ||||
|     BusEvent, | ||||
| @@ -64,12 +74,15 @@ from ..exceptions import ( | ||||
|     AddonsNotSupportedError, | ||||
|     ConfigurationFileError, | ||||
|     DockerError, | ||||
|     HomeAssistantAPIError, | ||||
|     HostAppArmorError, | ||||
| ) | ||||
| from ..hardware.data import Device | ||||
| from ..homeassistant.const import WSEvent, WSType | ||||
| from ..jobs.const import JobExecutionLimit | ||||
| from ..jobs.decorator import Job | ||||
| from ..resolution.const import UnhealthyReason | ||||
| from ..store.addon import AddonStore | ||||
| from ..utils import check_port | ||||
| from ..utils.apparmor import adjust_profile | ||||
| from ..utils.json import read_json_file, write_json_file | ||||
| @@ -80,6 +93,7 @@ from .const import ( | ||||
|     WATCHDOG_THROTTLE_MAX_CALLS, | ||||
|     WATCHDOG_THROTTLE_PERIOD, | ||||
|     AddonBackupMode, | ||||
|     MappingType, | ||||
| ) | ||||
| from .model import AddonModel, Data | ||||
| from .options import AddonOptions | ||||
| @@ -129,54 +143,7 @@ class Addon(AddonModel): | ||||
|         ) | ||||
|         self._listeners: list[EventListener] = [] | ||||
|         self._startup_event = asyncio.Event() | ||||
|  | ||||
|         @Job( | ||||
|             name=f"addon_{slug}_restart_after_problem", | ||||
|             limit=JobExecutionLimit.THROTTLE_RATE_LIMIT, | ||||
|             throttle_period=WATCHDOG_THROTTLE_PERIOD, | ||||
|             throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS, | ||||
|             on_condition=AddonsJobError, | ||||
|         ) | ||||
|         async def restart_after_problem(addon: Addon, state: ContainerState): | ||||
|             """Restart unhealthy or failed addon.""" | ||||
|             attempts = 0 | ||||
|             while await addon.instance.current_state() == state: | ||||
|                 if not addon.in_progress: | ||||
|                     _LOGGER.warning( | ||||
|                         "Watchdog found addon %s is %s, restarting...", | ||||
|                         addon.name, | ||||
|                         state.value, | ||||
|                     ) | ||||
|                     try: | ||||
|                         if state == ContainerState.FAILED: | ||||
|                             # Ensure failed container is removed before attempting reanimation | ||||
|                             if attempts == 0: | ||||
|                                 with suppress(DockerError): | ||||
|                                     await addon.instance.stop(remove_container=True) | ||||
|  | ||||
|                             await (await addon.start()) | ||||
|                         else: | ||||
|                             await (await addon.restart()) | ||||
|                     except AddonsError as err: | ||||
|                         attempts = attempts + 1 | ||||
|                         _LOGGER.error( | ||||
|                             "Watchdog restart of addon %s failed!", addon.name | ||||
|                         ) | ||||
|                         capture_exception(err) | ||||
|                     else: | ||||
|                         break | ||||
|  | ||||
|                 if attempts >= WATCHDOG_MAX_ATTEMPTS: | ||||
|                     _LOGGER.critical( | ||||
|                         "Watchdog cannot restart addon %s, failed all %s attempts", | ||||
|                         addon.name, | ||||
|                         attempts, | ||||
|                     ) | ||||
|                     break | ||||
|  | ||||
|                 await asyncio.sleep(WATCHDOG_RETRY_SECONDS) | ||||
|  | ||||
|         self._restart_after_problem = restart_after_problem | ||||
|         self._startup_task: asyncio.Task | None = None | ||||
|  | ||||
|     def __repr__(self) -> str: | ||||
|         """Return internal representation.""" | ||||
| @@ -217,6 +184,9 @@ class Addon(AddonModel): | ||||
|  | ||||
|     async def load(self) -> None: | ||||
|         """Async initialize of object.""" | ||||
|         if self.is_detached: | ||||
|             await super().refresh_path_cache() | ||||
|  | ||||
|         self._listeners.append( | ||||
|             self.sys_bus.register_event( | ||||
|                 BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed | ||||
| @@ -228,9 +198,21 @@ class Addon(AddonModel): | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         await self._check_ingress_port() | ||||
|         default_image = self._image(self.data) | ||||
|         try: | ||||
|             await self.instance.attach(version=self.version) | ||||
|  | ||||
|             # Ensure we are using correct image for this system | ||||
|             await self.instance.check_image(self.version, default_image, self.arch) | ||||
|         except DockerError: | ||||
|             _LOGGER.info("No Docker image %s found for add-on %s", self.image, self.slug) | ||||
|             with suppress(DockerError): | ||||
|                 await self.instance.install(self.version, default_image, arch=self.arch) | ||||
|  | ||||
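|         # Persist the resolved default image so later loads use the arch-correct image. | ||||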
|         self.persist[ATTR_IMAGE] = default_image | ||||
|         self.save_persist() | ||||
|  | ||||
|     @property | ||||
|     def ip_address(self) -> IPv4Address: | ||||
|         """Return IP of add-on instance.""" | ||||
| @@ -246,6 +228,11 @@ class Addon(AddonModel): | ||||
|         """Return add-on data from store.""" | ||||
|         return self.sys_store.data.addons.get(self.slug, self.data) | ||||
|  | ||||
|     @property | ||||
|     def addon_store(self) -> AddonStore | None: | ||||
|         """Return store representation of addon.""" | ||||
|         return self.sys_addons.store.get(self.slug) | ||||
|  | ||||
|     @property | ||||
|     def persist(self) -> Data: | ||||
|         """Return add-on data/config.""" | ||||
| @@ -261,6 +248,34 @@ class Addon(AddonModel): | ||||
|         """Return True if add-on is detached.""" | ||||
|         return self.slug not in self.sys_store.data.addons | ||||
|  | ||||
|     @property | ||||
|     def with_icon(self) -> bool: | ||||
|         """Return True if an icon exists.""" | ||||
|         if self.is_detached: | ||||
|             return super().with_icon | ||||
|         return self.addon_store.with_icon | ||||
|  | ||||
|     @property | ||||
|     def with_logo(self) -> bool: | ||||
|         """Return True if a logo exists.""" | ||||
|         if self.is_detached: | ||||
|             return super().with_logo | ||||
|         return self.addon_store.with_logo | ||||
|  | ||||
|     @property | ||||
|     def with_changelog(self) -> bool: | ||||
|         """Return True if a changelog exists.""" | ||||
|         if self.is_detached: | ||||
|             return super().with_changelog | ||||
|         return self.addon_store.with_changelog | ||||
|  | ||||
|     @property | ||||
|     def with_documentation(self) -> bool: | ||||
|         """Return True if a documentation exists.""" | ||||
|         if self.is_detached: | ||||
|             return super().with_documentation | ||||
|         return self.addon_store.with_documentation | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return True if this add-on is available on this platform.""" | ||||
| @@ -297,7 +312,9 @@ class Addon(AddonModel): | ||||
|  | ||||
|     @property | ||||
|     def boot(self) -> AddonBoot: | ||||
|         """Return boot config, preferring local settings unless the config forces a mode.""" | ||||
|         if self.boot_config == AddonBootConfig.MANUAL_ONLY: | ||||
|             return super().boot | ||||
|         return self.persist.get(ATTR_BOOT, super().boot) | ||||
|  | ||||
|     @boot.setter | ||||
| @@ -315,6 +332,28 @@ class Addon(AddonModel): | ||||
|         """Set auto update.""" | ||||
|         self.persist[ATTR_AUTO_UPDATE] = value | ||||
|  | ||||
|     @property | ||||
|     def auto_update_available(self) -> bool: | ||||
|         """Return if it is safe to auto update addon.""" | ||||
|         if not self.need_update or not self.auto_update: | ||||
|             return False | ||||
|  | ||||
|         for version in self.breaking_versions: | ||||
|             try: | ||||
|                 # Updates always go to the latest version, so if this is true the update crosses a breaking version | ||||
|                 if self.version < version: | ||||
|                     return False | ||||
|             except AwesomeVersionCompareException: | ||||
|                 # If the version scheme changed, we may get a compare exception. | ||||
|                 # If latest version >= breaking version, assume the update will | ||||
|                 # cross it as the version scheme changed. | ||||
|                 # If both versions raise a compare exception, ignore it as it's in the past. | ||||
|                 with suppress(AwesomeVersionCompareException): | ||||
|                     if self.latest_version >= version: | ||||
|                         return False | ||||
|  | ||||
|         return True | ||||
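|     # Example: installed 1.0, latest 3.0, breaking_versions == ["2.0"]: | ||||
|     # 1.0 < 2.0 means the update would cross the breaking release, so | ||||
|     # auto_update_available returns False and a manual update is required. | ||||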
|  | ||||
|     @property | ||||
|     def watchdog(self) -> bool: | ||||
|         """Return True if watchdog is enabled.""" | ||||
| @@ -330,6 +369,37 @@ class Addon(AddonModel): | ||||
|         else: | ||||
|             self.persist[ATTR_WATCHDOG] = value | ||||
|  | ||||
|     @property | ||||
|     def system_managed(self) -> bool: | ||||
|         """Return True if addon is managed by Home Assistant.""" | ||||
|         return self.persist[ATTR_SYSTEM_MANAGED] | ||||
|  | ||||
|     @system_managed.setter | ||||
|     def system_managed(self, value: bool) -> None: | ||||
|         """Set system managed enable/disable.""" | ||||
|         if not value and self.system_managed_config_entry: | ||||
|             self.system_managed_config_entry = None | ||||
|  | ||||
|         self.persist[ATTR_SYSTEM_MANAGED] = value | ||||
|  | ||||
|     @property | ||||
|     def system_managed_config_entry(self) -> str | None: | ||||
|         """Return id of config entry managing this addon (if any).""" | ||||
|         if not self.system_managed: | ||||
|             return None | ||||
|         return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY) | ||||
|  | ||||
|     @system_managed_config_entry.setter | ||||
|     def system_managed_config_entry(self, value: str | None) -> None: | ||||
|         """Set ID of config entry managing this addon.""" | ||||
|         if not self.system_managed: | ||||
|             _LOGGER.warning( | ||||
|                 "Ignoring system managed config entry for %s because it is not system managed", | ||||
|                 self.slug, | ||||
|             ) | ||||
|         else: | ||||
|             self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value | ||||
|  | ||||
|     @property | ||||
|     def uuid(self) -> str: | ||||
|         """Return an API token for this add-on.""" | ||||
| @@ -357,6 +427,11 @@ class Addon(AddonModel): | ||||
|         """Return version of add-on.""" | ||||
|         return self.data_store[ATTR_VERSION] | ||||
|  | ||||
|     @property | ||||
|     def latest_version_timestamp(self) -> datetime: | ||||
|         """Return when latest version was first seen.""" | ||||
|         return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP]) | ||||
|  | ||||
|     @property | ||||
|     def protected(self) -> bool: | ||||
|         """Return if add-on is in protected mode.""" | ||||
| @@ -434,7 +509,7 @@ class Addon(AddonModel): | ||||
|  | ||||
|         port = self.data[ATTR_INGRESS_PORT] | ||||
|         if port == 0: | ||||
|             raise RuntimeError(f"No port set for add-on {self.slug}") | ||||
|         return port | ||||
|  | ||||
|     @property | ||||
| @@ -500,6 +575,21 @@ class Addon(AddonModel): | ||||
|         """Return add-on data path external for Docker.""" | ||||
|         return PurePath(self.sys_config.path_extern_addons_data, self.slug) | ||||
|  | ||||
|     @property | ||||
|     def addon_config_used(self) -> bool: | ||||
|         """Add-on is using its public config folder.""" | ||||
|         return MappingType.ADDON_CONFIG in self.map_volumes | ||||
|  | ||||
|     @property | ||||
|     def path_config(self) -> Path: | ||||
|         """Return add-on config path inside Supervisor.""" | ||||
|         return Path(self.sys_config.path_addon_configs, self.slug) | ||||
|  | ||||
|     @property | ||||
|     def path_extern_config(self) -> PurePath: | ||||
|         """Return add-on config path external for Docker.""" | ||||
|         return PurePath(self.sys_config.path_extern_addon_configs, self.slug) | ||||
|  | ||||
|     @property | ||||
|     def path_options(self) -> Path: | ||||
|         """Return path to add-on options.""" | ||||
| @@ -563,7 +653,7 @@ class Addon(AddonModel): | ||||
|  | ||||
|         # TCP monitoring | ||||
|         if s_prefix == "tcp": | ||||
|             return await check_port(self.ip_address, port) | ||||
|  | ||||
|         # lookup the correct protocol from config | ||||
|         if t_proto: | ||||
| @@ -579,7 +669,7 @@ class Addon(AddonModel): | ||||
|             ) as req: | ||||
|                 if req.status < 300: | ||||
|                     return True | ||||
|         except (TimeoutError, aiohttp.ClientError): | ||||
|             pass | ||||
|  | ||||
|         return False | ||||
| @@ -606,16 +696,208 @@ class Addon(AddonModel): | ||||
|  | ||||
|         raise AddonConfigurationError() | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_unload", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def unload(self) -> None: | ||||
|         """Unload add-on and remove data.""" | ||||
|         if self._startup_task: | ||||
|             # If we were waiting on startup, cancel that and let the task finish before proceeding | ||||
|             self._startup_task.cancel(f"Removing add-on {self.name} from system") | ||||
|             with suppress(asyncio.CancelledError): | ||||
|                 await self._startup_task | ||||
|  | ||||
|         for listener in self._listeners: | ||||
|             self.sys_bus.remove_listener(listener) | ||||
|  | ||||
|         if self.path_data.is_dir(): | ||||
|             _LOGGER.info("Removing add-on data folder %s", self.path_data) | ||||
|             await remove_data(self.path_data) | ||||
|  | ||||
|     async def _check_ingress_port(self): | ||||
|         """Assign an ingress port if dynamic port selection is used.""" | ||||
|         if not self.with_ingress: | ||||
|             return | ||||
|  | ||||
|         if self.data[ATTR_INGRESS_PORT] == 0: | ||||
|             self.data[ATTR_INGRESS_PORT] = await self.sys_ingress.get_dynamic_port( | ||||
|                 self.slug | ||||
|             ) | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_install", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def install(self) -> None: | ||||
|         """Install and setup this addon.""" | ||||
|         self.sys_addons.data.install(self.addon_store) | ||||
|         await self.load() | ||||
|  | ||||
|         if not self.path_data.is_dir(): | ||||
|             _LOGGER.info( | ||||
|                 "Creating Home Assistant add-on data folder %s", self.path_data | ||||
|             ) | ||||
|             self.path_data.mkdir() | ||||
|  | ||||
|         # Setup/Fix AppArmor profile | ||||
|         await self.install_apparmor() | ||||
|  | ||||
|         # Install image | ||||
|         try: | ||||
|             await self.instance.install( | ||||
|                 self.latest_version, self.addon_store.image, arch=self.arch | ||||
|             ) | ||||
|         except DockerError as err: | ||||
|             self.sys_addons.data.uninstall(self) | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         # Add to addon manager | ||||
|         self.sys_addons.local[self.slug] = self | ||||
|  | ||||
|         # Reload ingress tokens | ||||
|         if self.with_ingress: | ||||
|             await self.sys_ingress.reload() | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_uninstall", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def uninstall( | ||||
|         self, *, remove_config: bool, remove_image: bool = True | ||||
|     ) -> None: | ||||
|         """Uninstall and cleanup this addon.""" | ||||
|         try: | ||||
|             await self.instance.remove(remove_image=remove_image) | ||||
|         except DockerError as err: | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         self.state = AddonState.UNKNOWN | ||||
|  | ||||
|         await self.unload() | ||||
|  | ||||
|         # Remove config if present and requested | ||||
|         if self.addon_config_used and remove_config: | ||||
|             await remove_data(self.path_config) | ||||
|  | ||||
|         # Cleanup audio settings | ||||
|         if self.path_pulse.exists(): | ||||
|             with suppress(OSError): | ||||
|                 self.path_pulse.unlink() | ||||
|  | ||||
|         # Cleanup AppArmor profile | ||||
|         with suppress(HostAppArmorError): | ||||
|             await self.uninstall_apparmor() | ||||
|  | ||||
|         # Cleanup Ingress panel from sidebar | ||||
|         if self.ingress_panel: | ||||
|             self.ingress_panel = False | ||||
|             with suppress(HomeAssistantAPIError): | ||||
|                 await self.sys_ingress.update_hass_panel(self) | ||||
|  | ||||
|         # Cleanup Ingress dynamic port assignment | ||||
|         if self.with_ingress: | ||||
|             self.sys_create_task(self.sys_ingress.reload()) | ||||
|             self.sys_ingress.del_dynamic_port(self.slug) | ||||
|  | ||||
|         # Cleanup discovery data | ||||
|         for message in self.sys_discovery.list_messages: | ||||
|             if message.addon != self.slug: | ||||
|                 continue | ||||
|             self.sys_discovery.remove(message) | ||||
|  | ||||
|         # Cleanup services data | ||||
|         for service in self.sys_services.list_services: | ||||
|             if self.slug not in service.active: | ||||
|                 continue | ||||
|             service.del_service_data(self) | ||||
|  | ||||
|         # Remove from addon manager | ||||
|         self.sys_addons.data.uninstall(self) | ||||
|         self.sys_addons.local.pop(self.slug) | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_update", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def update(self) -> asyncio.Task | None: | ||||
|         """Update this addon to latest version. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see start) | ||||
|         if it was running. Else nothing is returned. | ||||
|         """ | ||||
|         old_image = self.image | ||||
|         # Cache store data to prevent races with other updates to the global store | ||||
|         store = self.addon_store.clone() | ||||
|  | ||||
|         try: | ||||
|             await self.instance.update(store.version, store.image, arch=self.arch) | ||||
|         except DockerError as err: | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         # Stop the addon if running | ||||
|         if (last_state := self.state) in {AddonState.STARTED, AddonState.STARTUP}: | ||||
|             await self.stop() | ||||
|  | ||||
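|         # Everything below runs against the stopped container; the "finally" | ||||
|         # block restarts the add-on if it was running before the update. | ||||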
|         try: | ||||
|             _LOGGER.info("Add-on '%s' successfully updated", self.slug) | ||||
|             self.sys_addons.data.update(store) | ||||
|             await self._check_ingress_port() | ||||
|  | ||||
|             # Cleanup | ||||
|             with suppress(DockerError): | ||||
|                 await self.instance.cleanup( | ||||
|                     old_image=old_image, image=store.image, version=store.version | ||||
|                 ) | ||||
|  | ||||
|             # Setup/Fix AppArmor profile | ||||
|             await self.install_apparmor() | ||||
|  | ||||
|         finally: | ||||
|             # restore state. Return Task for caller if no exception | ||||
|             out = ( | ||||
|                 await self.start() | ||||
|                 if last_state in {AddonState.STARTED, AddonState.STARTUP} | ||||
|                 else None | ||||
|             ) | ||||
|         return out | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_rebuild", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def rebuild(self) -> asyncio.Task | None: | ||||
|         """Rebuild this addons container and image. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see start) | ||||
|         if it was running. Else nothing is returned. | ||||
|         """ | ||||
|         last_state: AddonState = self.state | ||||
|         try: | ||||
|             # remove docker container but not addon config | ||||
|             try: | ||||
|                 await self.instance.remove() | ||||
|                 await self.instance.install(self.version) | ||||
|             except DockerError as err: | ||||
|                 raise AddonsError() from err | ||||
|  | ||||
|             self.sys_addons.data.update(self.addon_store) | ||||
|             await self._check_ingress_port() | ||||
|             _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug) | ||||
|  | ||||
|         finally: | ||||
|             # restore state | ||||
|             out = ( | ||||
|                 await self.start() | ||||
|                 if last_state in [AddonState.STARTED, AddonState.STARTUP] | ||||
|                 else None | ||||
|             ) | ||||
|         return out | ||||
|  | ||||
|     def write_pulse(self) -> None: | ||||
|         """Write pulse audio client config to file.""" | ||||
| @@ -631,6 +913,8 @@ class Addon(AddonModel): | ||||
|         try: | ||||
|             self.path_pulse.write_text(pulse_config, encoding="utf-8") | ||||
|         except OSError as err: | ||||
|             if err.errno == errno.EBADMSG: | ||||
|                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||
|             _LOGGER.error( | ||||
|                 "Add-on %s can't write pulse/client.config: %s", self.slug, err | ||||
|             ) | ||||
| @@ -699,24 +983,34 @@ class Addon(AddonModel): | ||||
|     async def _wait_for_startup(self) -> None: | ||||
|         """Wait for startup event to be set with timeout.""" | ||||
|         try: | ||||
|             self._startup_task = self.sys_create_task(self._startup_event.wait()) | ||||
|             await asyncio.wait_for(self._startup_task, STARTUP_TIMEOUT) | ||||
|         except TimeoutError: | ||||
|             _LOGGER.warning( | ||||
|                 "Timeout while waiting for addon %s to start, took more than %s seconds", | ||||
|                 self.name, | ||||
|                 STARTUP_TIMEOUT, | ||||
|             ) | ||||
|         except asyncio.CancelledError as err: | ||||
|             _LOGGER.info("Wait for addon startup task cancelled due to: %s", err) | ||||
|         finally: | ||||
|             self._startup_task = None | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_start", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def start(self) -> asyncio.Task: | ||||
|         """Set options and start add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started'. | ||||
|         For addons with a healthcheck, that is when they become healthy or unhealthy. | ||||
|         Addons without a healthcheck have state 'started' immediately. | ||||
|         """ | ||||
|         if await self.instance.is_running(): | ||||
|             _LOGGER.warning("%s is already running!", self.slug) | ||||
|             return self.sys_create_task(self._wait_for_startup()) | ||||
|  | ||||
|         # Access Token | ||||
|         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56) | ||||
| @@ -729,6 +1023,18 @@ class Addon(AddonModel): | ||||
|         if self.with_audio: | ||||
|             self.write_pulse() | ||||
|  | ||||
|         def _check_addon_config_dir(): | ||||
|             if self.path_config.is_dir(): | ||||
|                 return | ||||
|  | ||||
|             _LOGGER.info( | ||||
|                 "Creating Home Assistant add-on config folder %s", self.path_config | ||||
|             ) | ||||
|             self.path_config.mkdir() | ||||
|  | ||||
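|         # mkdir is blocking I/O, hence the executor. | ||||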
|         if self.addon_config_used: | ||||
|             await self.sys_run_in_executor(_check_addon_config_dir) | ||||
|  | ||||
|         # Start Add-on | ||||
|         self._startup_event.clear() | ||||
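|         # Cleared here; presumably set again by the container state listener | ||||
|         # once Docker reports the add-on as started. | ||||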
|         try: | ||||
| @@ -737,8 +1043,13 @@ class Addon(AddonModel): | ||||
|             self.state = AddonState.ERROR | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|         return self.sys_create_task(self._wait_for_startup()) | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_stop", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def stop(self) -> None: | ||||
|         """Stop add-on.""" | ||||
|         self._manual_stop = True | ||||
| @@ -748,10 +1059,15 @@ class Addon(AddonModel): | ||||
|             self.state = AddonState.ERROR | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_restart", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def restart(self) -> asyncio.Task: | ||||
|         """Restart add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see start). | ||||
|         """ | ||||
|         with suppress(AddonsError): | ||||
|             await self.stop() | ||||
| @@ -778,11 +1094,13 @@ class Addon(AddonModel): | ||||
|         except DockerError as err: | ||||
|             raise AddonsError() from err | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_write_stdin", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def write_stdin(self, data) -> None: | ||||
|         """Write data to add-on stdin.""" | ||||
|         if not self.with_stdin: | ||||
|             raise AddonsNotSupportedError( | ||||
|                 f"Add-on {self.slug} does not support writing to stdin!", _LOGGER.error | ||||
| @@ -810,14 +1128,59 @@ class Addon(AddonModel): | ||||
|                 _LOGGER.error, | ||||
|             ) from err | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_begin_backup", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def begin_backup(self) -> bool: | ||||
|         """Execute pre commands or stop addon if necessary. | ||||
|  | ||||
|         Returns value of `is_running`. Caller should not call `end_backup` if the return value is False. | ||||
|         """ | ||||
|         if not await self.is_running(): | ||||
|             return False | ||||
|  | ||||
|         if self.backup_mode == AddonBackupMode.COLD: | ||||
|             _LOGGER.info("Shutdown add-on %s for cold backup", self.slug) | ||||
|             await self.stop() | ||||
|  | ||||
|         elif self.backup_pre is not None: | ||||
|             await self._backup_command(self.backup_pre) | ||||
|  | ||||
|         return True | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_end_backup", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def end_backup(self) -> asyncio.Task | None: | ||||
|         """Execute post commands or restart addon if necessary. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see start) | ||||
|         for cold backup. Else nothing is returned. | ||||
|         """ | ||||
|         if self.backup_mode is AddonBackupMode.COLD: | ||||
|             _LOGGER.info("Starting add-on %s again", self.slug) | ||||
|             return await self.start() | ||||
|  | ||||
|         if self.backup_post is not None: | ||||
|             await self._backup_command(self.backup_post) | ||||
|         return None | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_backup", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def backup(self, tar_file: tarfile.TarFile) -> asyncio.Task | None: | ||||
|         """Backup state of an add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see start) | ||||
|         for cold backup. Else nothing is returned. | ||||
|         """ | ||||
|         wait_for_start: Awaitable[None] | None = None | ||||
|  | ||||
|         with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp: | ||||
|             temp_path = Path(temp) | ||||
| @@ -869,16 +1232,16 @@ class Addon(AddonModel): | ||||
|                         arcname="data", | ||||
|                     ) | ||||
|  | ||||
|                     # Backup config | ||||
|                     if self.addon_config_used: | ||||
|                         atomic_contents_add( | ||||
|                             backup, | ||||
|                             self.path_config, | ||||
|                             excludes=self.backup_exclude, | ||||
|                             arcname="config", | ||||
|                         ) | ||||
|  | ||||
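|             # begin_backup() stops the add-on (cold mode) or runs backup_pre | ||||
|             # (hot mode); False means the add-on wasn't running. | ||||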
|             is_running = await self.begin_backup() | ||||
|             try: | ||||
|                 _LOGGER.info("Building backup for add-on %s", self.slug) | ||||
|                 await self.sys_run_in_executor(_write_tarfile) | ||||
| @@ -887,23 +1250,21 @@ class Addon(AddonModel): | ||||
|                     f"Can't write tarfile {tar_file}: {err}", _LOGGER.error | ||||
|                 ) from err | ||||
|             finally: | ||||
|                 if is_running: | ||||
|                     wait_for_start = await self.end_backup() | ||||
|  | ||||
|         _LOGGER.info("Finished backup for add-on %s", self.slug) | ||||
|         return wait_for_start | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_restore", | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def restore(self, tar_file: tarfile.TarFile) -> asyncio.Task | None: | ||||
|         """Restore state of an add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see start) | ||||
|         if addon is started after restore. Else nothing is returned. | ||||
|         """ | ||||
|         wait_for_start: Awaitable[None] | None = None | ||||
| @@ -912,7 +1273,11 @@ class Addon(AddonModel): | ||||
|             def _extract_tarfile(): | ||||
|                 """Extract tar backup.""" | ||||
|                 with tar_file as backup: | ||||
|                     backup.extractall(path=Path(temp), members=secure_path(backup)) | ||||
|                     backup.extractall( | ||||
|                         path=Path(temp), | ||||
|                         members=secure_path(backup), | ||||
|                         filter="fully_trusted", | ||||
|                     ) | ||||
|  | ||||
|             try: | ||||
|                 await self.sys_run_in_executor(_extract_tarfile) | ||||
| @@ -950,64 +1315,81 @@ class Addon(AddonModel): | ||||
|                 self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image | ||||
|             ) | ||||
|  | ||||
|             # Stop it first if it's running | ||||
|             if await self.instance.is_running(): | ||||
|                 await self.stop() | ||||
|  | ||||
|             try: | ||||
|                 # Check version / restore image | ||||
|                 version = data[ATTR_VERSION] | ||||
|                 if not await self.instance.exists(): | ||||
|                     _LOGGER.info("Restore/Install of image for addon %s", self.slug) | ||||
|  | ||||
|                     image_file = Path(temp, "image.tar") | ||||
|                     if image_file.is_file(): | ||||
|                         with suppress(DockerError): | ||||
|                             await self.instance.import_image(image_file) | ||||
|                     else: | ||||
|                         with suppress(DockerError): | ||||
|                             await self.instance.install( | ||||
|                                 version, restore_image, self.arch | ||||
|                             ) | ||||
|                             await self.instance.cleanup() | ||||
|                 elif self.instance.version != version or self.legacy: | ||||
|                     _LOGGER.info("Restore/Update of image for addon %s", self.slug) | ||||
|                     with suppress(DockerError): | ||||
|                         await self.instance.update(version, restore_image, self.arch) | ||||
|                 await self._check_ingress_port() | ||||
|  | ||||
|                 # Restore data and config | ||||
|                 def _restore_data(): | ||||
|                     """Restore data and config.""" | ||||
|                     temp_data = Path(temp, "data") | ||||
|                     if temp_data.is_dir(): | ||||
|                         shutil.copytree(temp_data, self.path_data, symlinks=True) | ||||
|                     else: | ||||
|                         self.path_data.mkdir() | ||||
|  | ||||
|                     temp_config = Path(temp, "config") | ||||
|                     if temp_config.is_dir(): | ||||
|                         shutil.copytree(temp_config, self.path_config, symlinks=True) | ||||
|                     elif self.addon_config_used: | ||||
|                         self.path_config.mkdir() | ||||
|  | ||||
|                 _LOGGER.info("Restoring data and config for addon %s", self.slug) | ||||
|                 if self.path_data.is_dir(): | ||||
|                     await remove_data(self.path_data) | ||||
|                 if self.path_config.is_dir(): | ||||
|                     await remove_data(self.path_config) | ||||
|  | ||||
|                 try: | ||||
|                     await self.sys_run_in_executor(_restore_data) | ||||
|                 except shutil.Error as err: | ||||
|                     raise AddonsError( | ||||
|                         f"Can't restore origin data: {err}", _LOGGER.error | ||||
|                     ) from err | ||||
|  | ||||
|                 # Restore AppArmor | ||||
|                 profile_file = Path(temp, "apparmor.txt") | ||||
|                 if profile_file.exists(): | ||||
|                     try: | ||||
|                         await self.sys_host.apparmor.load_profile( | ||||
|                             self.slug, profile_file | ||||
|                         ) | ||||
|                     except HostAppArmorError as err: | ||||
|                         _LOGGER.error( | ||||
|                             "Can't restore AppArmor profile for add-on %s", self.slug | ||||
|                         ) | ||||
|                         raise AddonsError() from err | ||||
|  | ||||
|             finally: | ||||
|                 # Is add-on loaded | ||||
|                 if not self.loaded: | ||||
|                     await self.load() | ||||
|  | ||||
|                 # Run add-on | ||||
|                 if data[ATTR_STATE] == AddonState.STARTED: | ||||
|                     wait_for_start = await self.start() | ||||
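|         # The try/finally above guarantees load() and the restart still run | ||||
|         # even if the image or data restore failed part-way. | ||||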
|  | ||||
|         _LOGGER.info("Finished restore for add-on %s", self.slug) | ||||
|         return wait_for_start | ||||
| @@ -1019,6 +1401,50 @@ class Addon(AddonModel): | ||||
|         """ | ||||
|         return self.instance.check_trust() | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_restart_after_problem", | ||||
|         limit=JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT, | ||||
|         throttle_period=WATCHDOG_THROTTLE_PERIOD, | ||||
|         throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def _restart_after_problem(self, state: ContainerState): | ||||
|         """Restart unhealthy or failed addon.""" | ||||
|         attempts = 0 | ||||
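|         # Bounded retry: gives up after WATCHDOG_MAX_ATTEMPTS; the Job throttle | ||||
|         # above additionally rate-limits how often this handler can run. | ||||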
|         while await self.instance.current_state() == state: | ||||
|             if not self.in_progress: | ||||
|                 _LOGGER.warning( | ||||
|                     "Watchdog found addon %s is %s, restarting...", | ||||
|                     self.name, | ||||
|                     state, | ||||
|                 ) | ||||
|                 try: | ||||
|                     if state == ContainerState.FAILED: | ||||
|                         # Ensure failed container is removed before attempting reanimation | ||||
|                         if attempts == 0: | ||||
|                             with suppress(DockerError): | ||||
|                                 await self.instance.stop(remove_container=True) | ||||
|  | ||||
|                         await (await self.start()) | ||||
|                     else: | ||||
|                         await (await self.restart()) | ||||
|                 except AddonsError as err: | ||||
|                     attempts = attempts + 1 | ||||
|                     _LOGGER.error("Watchdog restart of addon %s failed!", self.name) | ||||
|                     capture_exception(err) | ||||
|                 else: | ||||
|                     break | ||||
|  | ||||
|             if attempts >= WATCHDOG_MAX_ATTEMPTS: | ||||
|                 _LOGGER.critical( | ||||
|                     "Watchdog cannot restart addon %s, failed all %s attempts", | ||||
|                     self.name, | ||||
|                     attempts, | ||||
|                 ) | ||||
|                 break | ||||
|  | ||||
|             await asyncio.sleep(WATCHDOG_RETRY_SECONDS) | ||||
|  | ||||
|     async def container_state_changed(self, event: DockerContainerStateEvent) -> None: | ||||
|         """Set addon state from container state.""" | ||||
|         if event.name != self.instance.name: | ||||
| @@ -1053,4 +1479,10 @@ class Addon(AddonModel): | ||||
|             ContainerState.STOPPED, | ||||
|             ContainerState.UNHEALTHY, | ||||
|         ]: | ||||
|             await self._restart_after_problem(event.state) | ||||
|  | ||||
|     def refresh_path_cache(self) -> Awaitable[None]: | ||||
|         """Refresh cache of existing paths.""" | ||||
|         if self.is_detached: | ||||
|             return super().refresh_path_cache() | ||||
|         return self.addon_store.refresh_path_cache() | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Supervisor add-on build environment.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from functools import cached_property | ||||
| @@ -102,11 +103,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes): | ||||
|         except HassioArchNotFound: | ||||
|             return False | ||||
|  | ||||
|     def get_docker_args(self, version: AwesomeVersion, image: str | None = None): | ||||
|         """Create a dict with Docker build arguments.""" | ||||
|         args = { | ||||
|             "path": str(self.addon.path_location), | ||||
|             "tag": f"{image or self.addon.image}:{version!s}", | ||||
|             "dockerfile": str(self.dockerfile), | ||||
|             "pull": True, | ||||
|             "forcerm": not self.sys_dev, | ||||
|   | ||||
supervisor/addons/configuration.py (new file, 11 lines)
| @@ -0,0 +1,11 @@ | ||||
| """Configuration Objects for Addon Config.""" | ||||
|  | ||||
| from dataclasses import dataclass | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class FolderMapping: | ||||
|     """Represent folder mapping configuration.""" | ||||
|  | ||||
|     path: str | None | ||||
|     read_only: bool | ||||
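
A minimal, self-contained sketch (not part of this changeset) of how a validated map entry might be consumed; the entry dict is illustrative, with keys mirroring ATTR_TYPE, ATTR_PATH and ATTR_READ_ONLY from this diff:

    from dataclasses import dataclass


    @dataclass(slots=True)
    class FolderMapping:
        """Represent folder mapping configuration."""

        path: str | None
        read_only: bool


    # Illustrative entry, shaped like a validated "map" item from an add-on config
    entry = {"type": "homeassistant_config", "path": "/config", "read_only": True}
    mapping = FolderMapping(entry.get("path"), entry["read_only"])
    print(mapping)  # FolderMapping(path='/config', read_only=True)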
| @@ -1,19 +1,38 @@ | ||||
| """Add-on static data.""" | ||||
|  | ||||
| from datetime import timedelta | ||||
| from enum import StrEnum | ||||
|  | ||||
| from ..jobs.const import JobCondition | ||||
|  | ||||
|  | ||||
| class AddonBackupMode(StrEnum): | ||||
|     """Backup mode of an Add-on.""" | ||||
|  | ||||
|     HOT = "hot" | ||||
|     COLD = "cold" | ||||
|  | ||||
|  | ||||
| class MappingType(StrEnum): | ||||
|     """Mapping type of an Add-on Folder.""" | ||||
|  | ||||
|     DATA = "data" | ||||
|     CONFIG = "config" | ||||
|     SSL = "ssl" | ||||
|     ADDONS = "addons" | ||||
|     BACKUP = "backup" | ||||
|     SHARE = "share" | ||||
|     MEDIA = "media" | ||||
|     HOMEASSISTANT_CONFIG = "homeassistant_config" | ||||
|     ALL_ADDON_CONFIGS = "all_addon_configs" | ||||
|     ADDON_CONFIG = "addon_config" | ||||
|  | ||||
|  | ||||
| ATTR_BACKUP = "backup" | ||||
| ATTR_BREAKING_VERSIONS = "breaking_versions" | ||||
| ATTR_CODENOTARY = "codenotary" | ||||
| ATTR_READ_ONLY = "read_only" | ||||
| ATTR_PATH = "path" | ||||
| WATCHDOG_RETRY_SECONDS = 10 | ||||
| WATCHDOG_MAX_ATTEMPTS = 5 | ||||
| WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30) | ||||
|   | ||||
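
A short sketch of why the switch from the str/Enum mixin to StrEnum (Python 3.11+) is safe for MappingType: members still coerce from, compare equal to, and format as their raw string values, which the map handling later in this diff relies on:

    from enum import StrEnum


    class MappingType(StrEnum):
        CONFIG = "config"
        ADDON_CONFIG = "addon_config"


    assert MappingType("config") is MappingType.CONFIG  # coerce from raw value
    assert MappingType.CONFIG == "config"               # equal to the plain string
    assert f"{MappingType.CONFIG}" == "config"          # formats as its value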
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor add-on data.""" | ||||
|  | ||||
| from copy import deepcopy | ||||
| from typing import Any | ||||
|  | ||||
|   | ||||
supervisor/addons/manager.py (new file, 388 lines)
| @@ -0,0 +1,388 @@ | ||||
| """Supervisor add-on manager.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from contextlib import suppress | ||||
| import logging | ||||
| import tarfile | ||||
| from typing import Union | ||||
|  | ||||
| from ..const import AddonBoot, AddonStartup, AddonState | ||||
| from ..coresys import CoreSys, CoreSysAttributes | ||||
| from ..exceptions import ( | ||||
|     AddonConfigurationError, | ||||
|     AddonsError, | ||||
|     AddonsJobError, | ||||
|     AddonsNotSupportedError, | ||||
|     CoreDNSError, | ||||
|     DockerAPIError, | ||||
|     DockerError, | ||||
|     DockerNotFound, | ||||
|     HassioError, | ||||
|     HomeAssistantAPIError, | ||||
| ) | ||||
| from ..jobs.decorator import Job, JobCondition | ||||
| from ..resolution.const import ContextType, IssueType, SuggestionType | ||||
| from ..store.addon import AddonStore | ||||
| from ..utils import check_exception_chain | ||||
| from ..utils.sentry import capture_exception | ||||
| from .addon import Addon | ||||
| from .const import ADDON_UPDATE_CONDITIONS | ||||
| from .data import AddonsData | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| AnyAddon = Union[Addon, AddonStore] | ||||
|  | ||||
|  | ||||
| class AddonManager(CoreSysAttributes): | ||||
|     """Manage add-ons inside Supervisor.""" | ||||
|  | ||||
|     def __init__(self, coresys: CoreSys): | ||||
|         """Initialize Docker base wrapper.""" | ||||
|         self.coresys: CoreSys = coresys | ||||
|         self.data: AddonsData = AddonsData(coresys) | ||||
|         self.local: dict[str, Addon] = {} | ||||
|         self.store: dict[str, AddonStore] = {} | ||||
|  | ||||
|     @property | ||||
|     def all(self) -> list[AnyAddon]: | ||||
|         """Return a list of all add-ons.""" | ||||
|         addons: dict[str, AnyAddon] = {**self.store, **self.local} | ||||
|         return list(addons.values()) | ||||
|  | ||||
|     @property | ||||
|     def installed(self) -> list[Addon]: | ||||
|         """Return a list of all installed add-ons.""" | ||||
|         return list(self.local.values()) | ||||
|  | ||||
|     def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: | ||||
|         """Return an add-on from slug. | ||||
|  | ||||
|         Prio: | ||||
|           1 - Local | ||||
|           2 - Store | ||||
|         """ | ||||
|         if addon_slug in self.local: | ||||
|             return self.local[addon_slug] | ||||
|         if not local_only: | ||||
|             return self.store.get(addon_slug) | ||||
|         return None | ||||
|  | ||||
|     def from_token(self, token: str) -> Addon | None: | ||||
|         """Return an add-on from Supervisor token.""" | ||||
|         for addon in self.installed: | ||||
|             if token == addon.supervisor_token: | ||||
|                 return addon | ||||
|         return None | ||||
|  | ||||
|     async def load(self) -> None: | ||||
|         """Start up add-on management.""" | ||||
|         # Refresh cache for all store addons | ||||
|         tasks: list[Awaitable[None]] = [ | ||||
|             store.refresh_path_cache() for store in self.store.values() | ||||
|         ] | ||||
|  | ||||
|         # Load all installed addons | ||||
|         for slug in self.data.system: | ||||
|             addon = self.local[slug] = Addon(self.coresys, slug) | ||||
|             tasks.append(addon.load()) | ||||
|  | ||||
|         # Run initial tasks | ||||
|         _LOGGER.info("Found %d installed add-ons", len(self.data.system)) | ||||
|         if tasks: | ||||
|             await asyncio.gather(*tasks) | ||||
|  | ||||
|         # Sync DNS | ||||
|         await self.sync_dns() | ||||
|  | ||||
|     async def boot(self, stage: AddonStartup) -> None: | ||||
|         """Boot add-ons with mode auto.""" | ||||
|         tasks: list[Addon] = [] | ||||
|         for addon in self.installed: | ||||
|             if addon.boot != AddonBoot.AUTO or addon.startup != stage: | ||||
|                 continue | ||||
|             tasks.append(addon) | ||||
|  | ||||
|         # Evaluate add-ons which need to be started | ||||
|         _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks)) | ||||
|         if not tasks: | ||||
|             return | ||||
|  | ||||
|         # Start add-ons sequentially to avoid issues on slow IO | ||||
|         # Config.wait_boot is deprecated. Until addons update with healthchecks, | ||||
|         # add a sleep task for it to keep the same minimum amount of wait time | ||||
|         wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] | ||||
|         for addon in tasks: | ||||
|             try: | ||||
|                 if start_task := await addon.start(): | ||||
|                     wait_boot.append(start_task) | ||||
|             except AddonsError as err: | ||||
|                 # Check if there is a system/user issue | ||||
|                 if check_exception_chain( | ||||
|                     err, (DockerAPIError, DockerNotFound, AddonConfigurationError) | ||||
|                 ): | ||||
|                     addon.boot = AddonBoot.MANUAL | ||||
|                     addon.save_persist() | ||||
|             except HassioError: | ||||
|                 pass  # These are already handled | ||||
|             else: | ||||
|                 continue | ||||
|  | ||||
|             _LOGGER.warning("Can't start Add-on %s", addon.slug) | ||||
|  | ||||
|         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere | ||||
|         await asyncio.gather(*wait_boot, return_exceptions=True) | ||||
|  | ||||
|     async def shutdown(self, stage: AddonStartup) -> None: | ||||
|         """Shutdown addons.""" | ||||
|         tasks: list[Addon] = [] | ||||
|         for addon in self.installed: | ||||
|             if addon.state != AddonState.STARTED or addon.startup != stage: | ||||
|                 continue | ||||
|             tasks.append(addon) | ||||
|  | ||||
|         # Evaluate add-ons which need to be stopped | ||||
|         _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks)) | ||||
|         if not tasks: | ||||
|             return | ||||
|  | ||||
|         # Stop add-ons sequentially to avoid issues on slow IO | ||||
|         for addon in tasks: | ||||
|             try: | ||||
|                 await addon.stop() | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) | ||||
|                 capture_exception(err) | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_manager_install", | ||||
|         conditions=ADDON_UPDATE_CONDITIONS, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def install(self, slug: str) -> None: | ||||
|         """Install an add-on.""" | ||||
|         self.sys_jobs.current.reference = slug | ||||
|  | ||||
|         if slug in self.local: | ||||
|             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning) | ||||
|         store = self.store.get(slug) | ||||
|  | ||||
|         if not store: | ||||
|             raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error) | ||||
|  | ||||
|         store.validate_availability() | ||||
|  | ||||
|         await Addon(self.coresys, slug).install() | ||||
|  | ||||
|         _LOGGER.info("Add-on '%s' successfully installed", slug) | ||||
|  | ||||
|     async def uninstall(self, slug: str, *, remove_config: bool = False) -> None: | ||||
|         """Remove an add-on.""" | ||||
|         if slug not in self.local: | ||||
|             _LOGGER.warning("Add-on %s is not installed", slug) | ||||
|             return | ||||
|  | ||||
|         shared_image = any( | ||||
|             self.local[slug].image == addon.image | ||||
|             and self.local[slug].version == addon.version | ||||
|             for addon in self.installed | ||||
|             if addon.slug != slug | ||||
|         ) | ||||
|         await self.local[slug].uninstall( | ||||
|             remove_config=remove_config, remove_image=not shared_image | ||||
|         ) | ||||
|  | ||||
|         _LOGGER.info("Add-on '%s' successfully removed", slug) | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_manager_update", | ||||
|         conditions=ADDON_UPDATE_CONDITIONS, | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def update( | ||||
|         self, slug: str, backup: bool | None = False | ||||
|     ) -> asyncio.Task | None: | ||||
|         """Update add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||
|         if addon is started after update. Else nothing is returned. | ||||
|         """ | ||||
|         self.sys_jobs.current.reference = slug | ||||
|  | ||||
|         if slug not in self.local: | ||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||
|         addon = self.local[slug] | ||||
|  | ||||
|         if addon.is_detached: | ||||
|             raise AddonsError( | ||||
|                 f"Add-on {slug} is not available in the store", _LOGGER.error | ||||
|             ) | ||||
|         store = self.store[slug] | ||||
|  | ||||
|         if addon.version == store.version: | ||||
|             raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning) | ||||
|  | ||||
|         # Check if still available; maybe something has changed | ||||
|         store.validate_availability() | ||||
|  | ||||
|         if backup: | ||||
|             await self.sys_backups.do_backup_partial( | ||||
|                 name=f"addon_{addon.slug}_{addon.version}", | ||||
|                 homeassistant=False, | ||||
|                 addons=[addon.slug], | ||||
|             ) | ||||
|  | ||||
|         return await addon.update() | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_manager_rebuild", | ||||
|         conditions=[ | ||||
|             JobCondition.FREE_SPACE, | ||||
|             JobCondition.INTERNET_HOST, | ||||
|             JobCondition.HEALTHY, | ||||
|         ], | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def rebuild(self, slug: str) -> asyncio.Task | None: | ||||
|         """Perform a rebuild of local build add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||
|         if addon is started after rebuild. Else nothing is returned. | ||||
|         """ | ||||
|         self.sys_jobs.current.reference = slug | ||||
|  | ||||
|         if slug not in self.local: | ||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||
|         addon = self.local[slug] | ||||
|  | ||||
|         if addon.is_detached: | ||||
|             raise AddonsError( | ||||
|                 f"Add-on {slug} is not available in the store", _LOGGER.error | ||||
|             ) | ||||
|         store = self.store[slug] | ||||
|  | ||||
|         # Check if a rebuild is possible now | ||||
|         if addon.version != store.version: | ||||
|             raise AddonsError( | ||||
|                 "Version changed, use Update instead of Rebuild", _LOGGER.error | ||||
|             ) | ||||
|         if not addon.need_build: | ||||
|             raise AddonsNotSupportedError( | ||||
|                 "Can't rebuild an image-based add-on", _LOGGER.error | ||||
|             ) | ||||
|  | ||||
|         return await addon.rebuild() | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_manager_restore", | ||||
|         conditions=[ | ||||
|             JobCondition.FREE_SPACE, | ||||
|             JobCondition.INTERNET_HOST, | ||||
|             JobCondition.HEALTHY, | ||||
|         ], | ||||
|         on_condition=AddonsJobError, | ||||
|     ) | ||||
|     async def restore( | ||||
|         self, slug: str, tar_file: tarfile.TarFile | ||||
|     ) -> asyncio.Task | None: | ||||
|         """Restore state of an add-on. | ||||
|  | ||||
|         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||
|         if addon is started after restore. Else nothing is returned. | ||||
|         """ | ||||
|         self.sys_jobs.current.reference = slug | ||||
|  | ||||
|         if slug not in self.local: | ||||
|             _LOGGER.debug("Add-on %s is not locally available for restore", slug) | ||||
|             addon = Addon(self.coresys, slug) | ||||
|             had_ingress = False | ||||
|         else: | ||||
|             _LOGGER.debug("Add-on %s is locally available for restore", slug) | ||||
|             addon = self.local[slug] | ||||
|             had_ingress = addon.ingress_panel | ||||
|  | ||||
|         wait_for_start = await addon.restore(tar_file) | ||||
|  | ||||
|         # Check if new | ||||
|         if slug not in self.local: | ||||
|             _LOGGER.info("Detected new Add-on after restore %s", slug) | ||||
|             self.local[slug] = addon | ||||
|  | ||||
|         # Update ingress | ||||
|         if had_ingress != addon.ingress_panel: | ||||
|             await self.sys_ingress.reload() | ||||
|             with suppress(HomeAssistantAPIError): | ||||
|                 await self.sys_ingress.update_hass_panel(addon) | ||||
|  | ||||
|         return wait_for_start | ||||
|  | ||||
|     @Job( | ||||
|         name="addon_manager_repair", | ||||
|         conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST], | ||||
|     ) | ||||
|     async def repair(self) -> None: | ||||
|         """Repair local add-ons.""" | ||||
|         needs_repair: list[Addon] = [] | ||||
|  | ||||
|         # Evaluate Add-ons to repair | ||||
|         for addon in self.installed: | ||||
|             if await addon.instance.exists(): | ||||
|                 continue | ||||
|             needs_repair.append(addon) | ||||
|  | ||||
|         _LOGGER.info("Found %d add-ons to repair", len(needs_repair)) | ||||
|         if not needs_repair: | ||||
|             return | ||||
|  | ||||
|         for addon in needs_repair: | ||||
|             _LOGGER.info("Repairing add-on: %s", addon.slug) | ||||
|             with suppress(DockerError, KeyError): | ||||
|                 # Need to pull the image again | ||||
|                 if not addon.need_build: | ||||
|                     await addon.instance.install(addon.version, addon.image) | ||||
|                     continue | ||||
|  | ||||
|                 # Need local lookup | ||||
|                 if addon.need_build and not addon.is_detached: | ||||
|                     store = self.store[addon.slug] | ||||
|                     # If this add-on is available for rebuild | ||||
|                     if addon.version == store.version: | ||||
|                         await addon.instance.install(addon.version, addon.image) | ||||
|                         continue | ||||
|  | ||||
|             _LOGGER.error("Can't repair %s", addon.slug) | ||||
|             with suppress(AddonsError): | ||||
|                 await self.uninstall(addon.slug) | ||||
|  | ||||
|     async def sync_dns(self) -> None: | ||||
|         """Sync add-ons DNS names.""" | ||||
|         # Update hosts | ||||
|         add_host_coros: list[Awaitable[None]] = [] | ||||
|         for addon in self.installed: | ||||
|             try: | ||||
|                 if not await addon.instance.is_running(): | ||||
|                     continue | ||||
|             except DockerError as err: | ||||
|                 _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err) | ||||
|                 self.sys_resolution.create_issue( | ||||
|                     IssueType.CORRUPT_DOCKER, | ||||
|                     ContextType.ADDON, | ||||
|                     reference=addon.slug, | ||||
|                     suggestions=[SuggestionType.EXECUTE_REPAIR], | ||||
|                 ) | ||||
|                 capture_exception(err) | ||||
|             else: | ||||
|                 add_host_coros.append( | ||||
|                     self.sys_plugins.dns.add_host( | ||||
|                         ipv4=addon.ip_address, names=[addon.hostname], write=False | ||||
|                     ) | ||||
|                 ) | ||||
|  | ||||
|         await asyncio.gather(*add_host_coros) | ||||
|  | ||||
|         # Write hosts files | ||||
|         with suppress(CoreDNSError): | ||||
|             await self.sys_plugins.dns.write_hosts() | ||||
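
The pattern in sync_dns above (fan out independent coroutines, await them together, then perform a single write step with expected errors suppressed) in stripped-down, runnable form; all names are placeholders for the Supervisor plumbing:

    import asyncio
    from contextlib import suppress


    async def add_host(name: str) -> None:
        """Stand-in for sys_plugins.dns.add_host(..., write=False)."""
        print(f"add {name}")


    async def main() -> None:
        coros = [add_host(name) for name in ("core-dns", "core-audio")]
        await asyncio.gather(*coros)
        with suppress(OSError):  # CoreDNSError in the real code
            print("write hosts file once")


    asyncio.run(main())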
| @@ -1,13 +1,18 @@ | ||||
| """Init file for Supervisor add-ons.""" | ||||
|  | ||||
| from abc import ABC, abstractmethod | ||||
| from collections import defaultdict | ||||
| from collections.abc import Awaitable, Callable | ||||
| from contextlib import suppress | ||||
| from datetime import datetime | ||||
| import logging | ||||
| from pathlib import Path | ||||
| from typing import Any | ||||
|  | ||||
| from awesomeversion import AwesomeVersion, AwesomeVersionException | ||||
|  | ||||
| from supervisor.utils.dt import utc_from_timestamp | ||||
|  | ||||
| from ..const import ( | ||||
|     ATTR_ADVANCED, | ||||
|     ATTR_APPARMOR, | ||||
| @@ -64,11 +69,13 @@ from ..const import ( | ||||
|     ATTR_TIMEOUT, | ||||
|     ATTR_TMPFS, | ||||
|     ATTR_TRANSLATIONS, | ||||
|     ATTR_TYPE, | ||||
|     ATTR_UART, | ||||
|     ATTR_UDEV, | ||||
|     ATTR_URL, | ||||
|     ATTR_USB, | ||||
|     ATTR_VERSION, | ||||
|     ATTR_VERSION_TIMESTAMP, | ||||
|     ATTR_VIDEO, | ||||
|     ATTR_WATCHDOG, | ||||
|     ATTR_WEBUI, | ||||
| @@ -76,28 +83,47 @@ from ..const import ( | ||||
|     SECURITY_DISABLE, | ||||
|     SECURITY_PROFILE, | ||||
|     AddonBoot, | ||||
|     AddonBootConfig, | ||||
|     AddonStage, | ||||
|     AddonStartup, | ||||
| ) | ||||
| from ..coresys import CoreSys | ||||
| from ..docker.const import Capabilities | ||||
| from ..exceptions import AddonsNotSupportedError | ||||
| from ..jobs.const import JOB_GROUP_ADDON | ||||
| from ..jobs.job_group import JobGroup | ||||
| from ..utils import version_is_new_enough | ||||
| from .configuration import FolderMapping | ||||
| from .const import ( | ||||
|     ATTR_BACKUP, | ||||
|     ATTR_BREAKING_VERSIONS, | ||||
|     ATTR_CODENOTARY, | ||||
|     ATTR_PATH, | ||||
|     ATTR_READ_ONLY, | ||||
|     AddonBackupMode, | ||||
|     MappingType, | ||||
| ) | ||||
| from .options import AddonOptions, UiOptions | ||||
| from .validate import RE_SERVICE | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| Data = dict[str, Any] | ||||
|  | ||||
|  | ||||
| class AddonModel(JobGroup, ABC): | ||||
|     """Add-on Data layout.""" | ||||
|  | ||||
|     def __init__(self, coresys: CoreSys, slug: str): | ||||
|         """Initialize data holder.""" | ||||
|         super().__init__( | ||||
|             coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug | ||||
|         ) | ||||
|         self.slug: str = slug | ||||
|         self._path_icon_exists: bool = False | ||||
|         self._path_logo_exists: bool = False | ||||
|         self._path_changelog_exists: bool = False | ||||
|         self._path_documentation_exists: bool = False | ||||
|  | ||||
|     @property | ||||
|     @abstractmethod | ||||
| @@ -125,10 +151,15 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|         return self.data[ATTR_OPTIONS] | ||||
|  | ||||
|     @property | ||||
|     def boot_config(self) -> AddonBootConfig: | ||||
|         """Return boot config.""" | ||||
|         return self.data[ATTR_BOOT] | ||||
|  | ||||
|     @property | ||||
|     def boot(self) -> AddonBoot: | ||||
|         """Return boot config with prio local settings unless config is forced.""" | ||||
|         return AddonBoot(self.data[ATTR_BOOT]) | ||||
|  | ||||
|     @property | ||||
|     def auto_update(self) -> bool | None: | ||||
|         """Return if auto update is enabled.""" | ||||
| @@ -206,6 +237,11 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|         """Return latest version of add-on.""" | ||||
|         return self.data[ATTR_VERSION] | ||||
|  | ||||
|     @property | ||||
|     def latest_version_timestamp(self) -> datetime: | ||||
|         """Return when latest version was first seen.""" | ||||
|         return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP]) | ||||
|  | ||||
|     @property | ||||
|     def version(self) -> AwesomeVersion: | ||||
|         """Return version of add-on.""" | ||||
| @@ -486,22 +522,22 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|     @property | ||||
|     def with_icon(self) -> bool: | ||||
|         """Return True if an icon exists.""" | ||||
|         return self._path_icon_exists | ||||
|  | ||||
|     @property | ||||
|     def with_logo(self) -> bool: | ||||
|         """Return True if a logo exists.""" | ||||
|         return self._path_logo_exists | ||||
|  | ||||
|     @property | ||||
|     def with_changelog(self) -> bool: | ||||
|         """Return True if a changelog exists.""" | ||||
|         return self._path_changelog_exists | ||||
|  | ||||
|     @property | ||||
|     def with_documentation(self) -> bool: | ||||
|         """Return True if documentation exists.""" | ||||
|         return self._path_documentation_exists | ||||
|  | ||||
|     @property | ||||
|     def supported_arch(self) -> list[str]: | ||||
| @@ -532,14 +568,13 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|         return ATTR_IMAGE not in self.data | ||||
|  | ||||
|     @property | ||||
|     def map_volumes(self) -> dict[MappingType, FolderMapping]: | ||||
|         """Return a dict of {MappingType: FolderMapping} from add-on.""" | ||||
|         volumes = {} | ||||
|         for volume in self.data[ATTR_MAP]: | ||||
|             volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping( | ||||
|                 volume.get(ATTR_PATH), volume[ATTR_READ_ONLY] | ||||
|             ) | ||||
|  | ||||
|         return volumes | ||||
|  | ||||
| @@ -606,6 +641,22 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|         """Return Signer email address for CAS.""" | ||||
|         return self.data.get(ATTR_CODENOTARY) | ||||
|  | ||||
|     @property | ||||
|     def breaking_versions(self) -> list[AwesomeVersion]: | ||||
|         """Return breaking versions of addon.""" | ||||
|         return self.data[ATTR_BREAKING_VERSIONS] | ||||
|  | ||||
|     def refresh_path_cache(self) -> Awaitable[None]: | ||||
|         """Refresh cache of existing paths.""" | ||||
|  | ||||
|         def check_paths(): | ||||
|             self._path_icon_exists = self.path_icon.exists() | ||||
|             self._path_logo_exists = self.path_logo.exists() | ||||
|             self._path_changelog_exists = self.path_changelog.exists() | ||||
|             self._path_documentation_exists = self.path_documentation.exists() | ||||
|  | ||||
|         return self.sys_run_in_executor(check_paths) | ||||
|  | ||||
|     def validate_availability(self) -> None: | ||||
|         """Validate if addon is available for current system.""" | ||||
|         return self._validate_availability(self.data, logger=_LOGGER.error) | ||||
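
refresh_path_cache above batches the blocking Path.exists() calls into one plain function and hands it to an executor, so the event loop never blocks on filesystem I/O. A minimal self-contained sketch of that pattern (not the Supervisor API itself):

    import asyncio
    from pathlib import Path


    async def refresh_path_cache(paths: list[Path]) -> list[bool]:
        """Check many paths in a worker thread, off the event loop."""
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, lambda: [p.exists() for p in paths])


    print(asyncio.run(refresh_path_cache([Path("/tmp"), Path("/nonexistent")])))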
| @@ -640,7 +691,9 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|         # Home Assistant | ||||
|         version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT) | ||||
|         with suppress(AwesomeVersionException, TypeError): | ||||
|             if version and not version_is_new_enough( | ||||
|                 self.sys_homeassistant.version, version | ||||
|             ): | ||||
|                 raise AddonsNotSupportedError( | ||||
|                     f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater", | ||||
|                     logger, | ||||
| @@ -664,19 +717,3 @@ class AddonModel(CoreSysAttributes, ABC): | ||||
|  | ||||
|         # local build | ||||
|         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}" | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Add-on Options / UI rendering.""" | ||||
|  | ||||
| import hashlib | ||||
| import logging | ||||
| from pathlib import Path | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Util add-ons functions.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Validate add-ons options schema.""" | ||||
|  | ||||
| import logging | ||||
| import re | ||||
| import secrets | ||||
| @@ -78,9 +79,12 @@ from ..const import ( | ||||
|     ATTR_STATE, | ||||
|     ATTR_STDIN, | ||||
|     ATTR_SYSTEM, | ||||
|     ATTR_SYSTEM_MANAGED, | ||||
|     ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, | ||||
|     ATTR_TIMEOUT, | ||||
|     ATTR_TMPFS, | ||||
|     ATTR_TRANSLATIONS, | ||||
|     ATTR_TYPE, | ||||
|     ATTR_UART, | ||||
|     ATTR_UDEV, | ||||
|     ATTR_URL, | ||||
| @@ -94,11 +98,11 @@ from ..const import ( | ||||
|     ROLE_ALL, | ||||
|     ROLE_DEFAULT, | ||||
|     AddonBoot, | ||||
|     AddonBootConfig, | ||||
|     AddonStage, | ||||
|     AddonStartup, | ||||
|     AddonState, | ||||
| ) | ||||
| from ..docker.const import Capabilities | ||||
| from ..validate import ( | ||||
|     docker_image, | ||||
| @@ -109,12 +113,23 @@ from ..validate import ( | ||||
|     uuid_match, | ||||
|     version_tag, | ||||
| ) | ||||
| from .const import ( | ||||
|     ATTR_BACKUP, | ||||
|     ATTR_BREAKING_VERSIONS, | ||||
|     ATTR_CODENOTARY, | ||||
|     ATTR_PATH, | ||||
|     ATTR_READ_ONLY, | ||||
|     RE_SLUG, | ||||
|     AddonBackupMode, | ||||
|     MappingType, | ||||
| ) | ||||
| from .options import RE_SCHEMA_ELEMENT | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| RE_VOLUME = re.compile( | ||||
|     r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$" | ||||
| ) | ||||
| RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$") | ||||
|  | ||||
|  | ||||
| @@ -143,6 +158,9 @@ RE_MACHINE = re.compile( | ||||
|     r"|raspberrypi3" | ||||
|     r"|raspberrypi4-64" | ||||
|     r"|raspberrypi4" | ||||
|     r"|raspberrypi5-64" | ||||
|     r"|yellow" | ||||
|     r"|green" | ||||
|     r"|tinker" | ||||
|     r")$" | ||||
| ) | ||||
| @@ -175,20 +193,6 @@ def _warn_addon_config(config: dict[str, Any]): | ||||
|             name, | ||||
|         ) | ||||
|  | ||||
|     return config | ||||
|  | ||||
|  | ||||
| @@ -210,9 +214,9 @@ def _migrate_addon_config(protocol=False): | ||||
|                     name, | ||||
|                 ) | ||||
|             if value == "before": | ||||
|                 config[ATTR_STARTUP] = AddonStartup.SERVICES | ||||
|             elif value == "after": | ||||
|                 config[ATTR_STARTUP] = AddonStartup.APPLICATION | ||||
|  | ||||
|         # UART 2021-01-20 | ||||
|         if "auto_uart" in config: | ||||
| @@ -258,6 +262,48 @@ def _migrate_addon_config(protocol=False): | ||||
|                     name, | ||||
|                 ) | ||||
|  | ||||
|         # 2023-11 "map" entries can also be dict to allow path configuration | ||||
|         volumes = [] | ||||
|         for entry in config.get(ATTR_MAP, []): | ||||
|             if isinstance(entry, dict): | ||||
|                 volumes.append(entry) | ||||
|             if isinstance(entry, str): | ||||
|                 result = RE_VOLUME.match(entry) | ||||
|                 if not result: | ||||
|                     continue | ||||
|                 volumes.append( | ||||
|                     { | ||||
|                         ATTR_TYPE: result.group(1), | ||||
|                         ATTR_READ_ONLY: result.group(2) != "rw", | ||||
|                     } | ||||
|                 ) | ||||
|  | ||||
|         if volumes: | ||||
|             config[ATTR_MAP] = volumes | ||||
|  | ||||
|         # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config | ||||
|         if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes): | ||||
|             if any( | ||||
|                 volume | ||||
|                 and volume[ATTR_TYPE] | ||||
|                 in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG} | ||||
|                 for volume in volumes | ||||
|             ): | ||||
|                 _LOGGER.warning( | ||||
|                     "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s", | ||||
|                     MappingType.ADDON_CONFIG, | ||||
|                     MappingType.HOMEASSISTANT_CONFIG, | ||||
|                     MappingType.CONFIG, | ||||
|                     name, | ||||
|                 ) | ||||
|             else: | ||||
|                 _LOGGER.debug( | ||||
|                     "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s", | ||||
|                     MappingType.CONFIG, | ||||
|                     MappingType.HOMEASSISTANT_CONFIG, | ||||
|                     name, | ||||
|                 ) | ||||
|  | ||||
|         return config | ||||
|  | ||||
|     return _migrate | ||||
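
A worked example of the 2023-11 map migration above, using the same regex: a legacy "<type>[:rw|ro]" string becomes a dict entry with read_only derived from the access suffix, while dict entries pass through untouched. migrate_entry is an illustrative stand-in for the inline loop:

    import re

    RE_VOLUME = re.compile(
        r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
    )


    def migrate_entry(entry):
        """Mirror the string-to-dict step of the map migration."""
        if isinstance(entry, dict):
            return entry
        result = RE_VOLUME.match(entry)
        if not result:
            return None  # invalid entries are skipped
        return {"type": result.group(1), "read_only": result.group(2) != "rw"}


    assert migrate_entry("config:rw") == {"type": "config", "read_only": False}
    assert migrate_entry("share") == {"type": "share", "read_only": True}
    assert migrate_entry("bogus:rw") is None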
| @@ -276,7 +322,9 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( | ||||
|         vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce( | ||||
|             AddonStartup | ||||
|         ), | ||||
|         vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce( | ||||
|             AddonBootConfig | ||||
|         ), | ||||
|         vol.Optional(ATTR_INIT, default=True): vol.Boolean(), | ||||
|         vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage), | ||||
| @@ -306,7 +354,15 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( | ||||
|         vol.Optional(ATTR_DEVICES): [str], | ||||
|         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_MAP, default=list): [ | ||||
|             vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(ATTR_TYPE): vol.Coerce(MappingType), | ||||
|                     vol.Optional(ATTR_READ_ONLY, default=True): bool, | ||||
|                     vol.Optional(ATTR_PATH): str, | ||||
|                 } | ||||
|             ) | ||||
|         ], | ||||
|         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str}, | ||||
|         vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)], | ||||
|         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), | ||||
| @@ -358,6 +414,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( | ||||
|             vol.Coerce(int), vol.Range(min=10, max=300) | ||||
|         ), | ||||
|         vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag], | ||||
|     }, | ||||
|     extra=vol.REMOVE_EXTRA, | ||||
| ) | ||||
| @@ -416,6 +473,8 @@ SCHEMA_ADDON_USER = vol.Schema( | ||||
|         vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(), | ||||
|         vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str), | ||||
|     }, | ||||
|     extra=vol.REMOVE_EXTRA, | ||||
| ) | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor RESTful API.""" | ||||
|  | ||||
| from functools import partial | ||||
| import logging | ||||
| from pathlib import Path | ||||
| @@ -8,12 +9,14 @@ from aiohttp import web | ||||
|  | ||||
| from ..const import AddonState | ||||
| from ..coresys import CoreSys, CoreSysAttributes | ||||
| from ..exceptions import APIAddonNotInstalled, HostNotSupportedError | ||||
| from ..utils.sentry import capture_exception | ||||
| from .addons import APIAddons | ||||
| from .audio import APIAudio | ||||
| from .auth import APIAuth | ||||
| from .backups import APIBackups | ||||
| from .cli import APICli | ||||
| from .const import CONTENT_TYPE_TEXT | ||||
| from .discovery import APIDiscovery | ||||
| from .dns import APICoreDNS | ||||
| from .docker import APIDocker | ||||
| @@ -35,7 +38,7 @@ from .security import APISecurity | ||||
| from .services import APIServices | ||||
| from .store import APIStore | ||||
| from .supervisor import APISupervisor | ||||
| from .utils import api_process, api_process_raw | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -66,11 +69,17 @@ class RestAPI(CoreSysAttributes): | ||||
|         ) | ||||
|  | ||||
|         # service stuff | ||||
|         self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5) | ||||
|         self._site: web.TCPSite | None = None | ||||
|  | ||||
|         # share single host API handler for reuse in logging endpoints | ||||
|         self._api_host: APIHost | None = None | ||||
|  | ||||
|     async def load(self) -> None: | ||||
|         """Register REST API Calls.""" | ||||
|         self._api_host = APIHost() | ||||
|         self._api_host.coresys = self.coresys | ||||
|  | ||||
|         self._register_addons() | ||||
|         self._register_audio() | ||||
|         self._register_auth() | ||||
| @@ -100,10 +109,41 @@ class RestAPI(CoreSysAttributes): | ||||
|  | ||||
|         await self.start() | ||||
|  | ||||
|     def _register_advanced_logs(self, path: str, syslog_identifier: str): | ||||
|         """Register logs endpoint for a given path, returning logs for single syslog identifier.""" | ||||
|  | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get( | ||||
|                     f"{path}/logs", | ||||
|                     partial(self._api_host.advanced_logs, identifier=syslog_identifier), | ||||
|                 ), | ||||
|                 web.get( | ||||
|                     f"{path}/logs/follow", | ||||
|                     partial( | ||||
|                         self._api_host.advanced_logs, | ||||
|                         identifier=syslog_identifier, | ||||
|                         follow=True, | ||||
|                     ), | ||||
|                 ), | ||||
|                 web.get( | ||||
|                     f"{path}/logs/boots/{{bootid}}", | ||||
|                     partial(self._api_host.advanced_logs, identifier=syslog_identifier), | ||||
|                 ), | ||||
|                 web.get( | ||||
|                     f"{path}/logs/boots/{{bootid}}/follow", | ||||
|                     partial( | ||||
|                         self._api_host.advanced_logs, | ||||
|                         identifier=syslog_identifier, | ||||
|                         follow=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
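
In _register_advanced_logs above, one coroutine serves four routes, with per-route keyword arguments fixed via functools.partial; aiohttp accepts any callable returning a coroutine, so a partial registers like a normal handler. A stripped-down, runnable illustration (names are placeholders):

    import asyncio
    from functools import partial


    async def advanced_logs(request, identifier=None, follow=False):
        """Stand-in for APIHost.advanced_logs."""
        return f"identifier={identifier} follow={follow}"


    handler = partial(advanced_logs, identifier="hassio_dns", follow=True)
    # e.g. web.get("/dns/logs/follow", handler) in the real route table
    print(asyncio.run(handler(request=None)))  # identifier=hassio_dns follow=True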
|     def _register_host(self) -> None: | ||||
|         """Register hostcontrol functions.""" | ||||
|         api_host = self._api_host | ||||
|  | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
| @@ -180,12 +220,16 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.post("/os/config/sync", api_os.config_sync), | ||||
|                 web.post("/os/datadisk/move", api_os.migrate_data), | ||||
|                 web.get("/os/datadisk/list", api_os.list_data), | ||||
|                 web.post("/os/datadisk/wipe", api_os.wipe_data), | ||||
|                 web.post("/os/boot-slot", api_os.set_boot_slot), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         # Boards endpoints | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get("/os/boards/green", api_os.boards_green_info), | ||||
|                 web.post("/os/boards/green", api_os.boards_green_options), | ||||
|                 web.get("/os/boards/yellow", api_os.boards_yellow_info), | ||||
|                 web.post("/os/boards/yellow", api_os.boards_yellow_options), | ||||
|                 web.get("/os/boards/{board}", api_os.boards_other_info), | ||||
| @@ -215,6 +259,8 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.get("/jobs/info", api_jobs.info), | ||||
|                 web.post("/jobs/options", api_jobs.options), | ||||
|                 web.post("/jobs/reset", api_jobs.reset), | ||||
|                 web.get("/jobs/{uuid}", api_jobs.job_info), | ||||
|                 web.delete("/jobs/{uuid}", api_jobs.remove_job), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
| @@ -253,11 +299,11 @@ class RestAPI(CoreSysAttributes): | ||||
|             [ | ||||
|                 web.get("/multicast/info", api_multicast.info), | ||||
|                 web.get("/multicast/stats", api_multicast.stats), | ||||
|                 web.post("/multicast/update", api_multicast.update), | ||||
|                 web.post("/multicast/restart", api_multicast.restart), | ||||
|             ] | ||||
|         ) | ||||
|         self._register_advanced_logs("/multicast", "hassio_multicast") | ||||
|  | ||||
|     def _register_hardware(self) -> None: | ||||
|         """Register hardware functions.""" | ||||
| @@ -330,6 +376,7 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.post("/auth", api_auth.auth), | ||||
|                 web.post("/auth/reset", api_auth.reset), | ||||
|                 web.delete("/auth/cache", api_auth.cache), | ||||
|                 web.get("/auth/list", api_auth.list_users), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
| @@ -343,7 +390,6 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.get("/supervisor/ping", api_supervisor.ping), | ||||
|                 web.get("/supervisor/info", api_supervisor.info), | ||||
|                 web.get("/supervisor/stats", api_supervisor.stats), | ||||
|                 web.post("/supervisor/update", api_supervisor.update), | ||||
|                 web.post("/supervisor/reload", api_supervisor.reload), | ||||
|                 web.post("/supervisor/restart", api_supervisor.restart), | ||||
| @@ -352,6 +398,38 @@ class RestAPI(CoreSysAttributes): | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         async def get_supervisor_logs(*args, **kwargs): | ||||
|             try: | ||||
|                 return await self._api_host.advanced_logs_handler( | ||||
|                     *args, identifier="hassio_supervisor", **kwargs | ||||
|                 ) | ||||
|             except Exception as err:  # pylint: disable=broad-exception-caught | ||||
|                 # Supervisor logs are critical, so catch everything, log the exception | ||||
|                 # and try to return Docker container logs as the fallback | ||||
|                 _LOGGER.exception( | ||||
|                     "Failed to get supervisor logs using advanced_logs API" | ||||
|                 ) | ||||
|                 if not isinstance(err, HostNotSupportedError): | ||||
|                     # No need to capture HostNotSupportedError to Sentry, the cause | ||||
|                     # is known and reported to the user using the resolution center. | ||||
|                     capture_exception(err) | ||||
|                 return await api_supervisor.logs(*args, **kwargs) | ||||
|  | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get("/supervisor/logs", get_supervisor_logs), | ||||
|                 web.get( | ||||
|                     "/supervisor/logs/follow", | ||||
|                     partial(get_supervisor_logs, follow=True), | ||||
|                 ), | ||||
|                 web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs), | ||||
|                 web.get( | ||||
|                     "/supervisor/logs/boots/{bootid}/follow", | ||||
|                     partial(get_supervisor_logs, follow=True), | ||||
|                 ), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|     def _register_homeassistant(self) -> None: | ||||
|         """Register Home Assistant functions.""" | ||||
|         api_hass = APIHomeAssistant() | ||||
| @@ -360,7 +438,6 @@ class RestAPI(CoreSysAttributes): | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get("/core/info", api_hass.info), | ||||
|                 web.get("/core/stats", api_hass.stats), | ||||
|                 web.post("/core/options", api_hass.options), | ||||
|                 web.post("/core/update", api_hass.update), | ||||
| @@ -372,11 +449,12 @@ class RestAPI(CoreSysAttributes): | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         self._register_advanced_logs("/core", "homeassistant") | ||||
|  | ||||
|         # Reroute from legacy | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get("/homeassistant/info", api_hass.info), | ||||
|                 web.get("/homeassistant/stats", api_hass.stats), | ||||
|                 web.post("/homeassistant/options", api_hass.options), | ||||
|                 web.post("/homeassistant/restart", api_hass.restart), | ||||
| @@ -388,6 +466,8 @@ class RestAPI(CoreSysAttributes): | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         self._register_advanced_logs("/homeassistant", "homeassistant") | ||||
|  | ||||
|     def _register_proxy(self) -> None: | ||||
|         """Register Home Assistant API Proxy.""" | ||||
|         api_proxy = APIProxy() | ||||
| @@ -429,18 +509,39 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.post("/addons/{addon}/stop", api_addons.stop), | ||||
|                 web.post("/addons/{addon}/restart", api_addons.restart), | ||||
|                 web.post("/addons/{addon}/options", api_addons.options), | ||||
|                 web.post("/addons/{addon}/sys_options", api_addons.sys_options), | ||||
|                 web.post( | ||||
|                     "/addons/{addon}/options/validate", api_addons.options_validate | ||||
|                 ), | ||||
|                 web.get("/addons/{addon}/options/config", api_addons.options_config), | ||||
|                 web.post("/addons/{addon}/rebuild", api_addons.rebuild), | ||||
|                 web.post("/addons/{addon}/stdin", api_addons.stdin), | ||||
|                 web.post("/addons/{addon}/security", api_addons.security), | ||||
|                 web.get("/addons/{addon}/stats", api_addons.stats), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT) | ||||
|         async def get_addon_logs(request, *args, **kwargs): | ||||
|             addon = api_addons.get_addon_for_request(request) | ||||
|             kwargs["identifier"] = f"addon_{addon.slug}" | ||||
|             return await self._api_host.advanced_logs(request, *args, **kwargs) | ||||
|  | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get("/addons/{addon}/logs", get_addon_logs), | ||||
|                 web.get( | ||||
|                     "/addons/{addon}/logs/follow", | ||||
|                     partial(get_addon_logs, follow=True), | ||||
|                 ), | ||||
|                 web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs), | ||||
|                 web.get( | ||||
|                     "/addons/{addon}/logs/boots/{bootid}/follow", | ||||
|                     partial(get_addon_logs, follow=True), | ||||
|                 ), | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         # Legacy routing to support requests for not installed addons | ||||
|         api_store = APIStore() | ||||
|         api_store.coresys = self.coresys | ||||
| @@ -485,6 +586,8 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.get("/backups/info", api_backups.info), | ||||
|                 web.post("/backups/options", api_backups.options), | ||||
|                 web.post("/backups/reload", api_backups.reload), | ||||
|                 web.post("/backups/freeze", api_backups.freeze), | ||||
|                 web.post("/backups/thaw", api_backups.thaw), | ||||
|                 web.post("/backups/new/full", api_backups.backup_full), | ||||
|                 web.post("/backups/new/partial", api_backups.backup_partial), | ||||
|                 web.post("/backups/new/upload", api_backups.upload), | ||||
| @@ -536,7 +639,6 @@ class RestAPI(CoreSysAttributes): | ||||
|             [ | ||||
|                 web.get("/dns/info", api_dns.info), | ||||
|                 web.get("/dns/stats", api_dns.stats), | ||||
|                 web.post("/dns/update", api_dns.update), | ||||
|                 web.post("/dns/options", api_dns.options), | ||||
|                 web.post("/dns/restart", api_dns.restart), | ||||
| @@ -544,18 +646,17 @@ class RestAPI(CoreSysAttributes): | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         self._register_advanced_logs("/dns", "hassio_dns") | ||||
|  | ||||
|     def _register_audio(self) -> None: | ||||
|         """Register Audio functions.""" | ||||
|         api_audio = APIAudio() | ||||
|         api_audio.coresys = self.coresys | ||||
|  | ||||
|         self.webapp.add_routes( | ||||
|             [ | ||||
|                 web.get("/audio/info", api_audio.info), | ||||
|                 web.get("/audio/stats", api_audio.stats), | ||||
|                 web.post("/audio/update", api_audio.update), | ||||
|                 web.post("/audio/restart", api_audio.restart), | ||||
|                 web.post("/audio/reload", api_audio.reload), | ||||
| @@ -568,6 +669,8 @@ class RestAPI(CoreSysAttributes): | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         self._register_advanced_logs("/audio", "hassio_audio") | ||||
|  | ||||
|     def _register_mounts(self) -> None: | ||||
|         """Register mounts endpoints.""" | ||||
|         api_mounts = APIMounts() | ||||
| @@ -594,7 +697,6 @@ class RestAPI(CoreSysAttributes): | ||||
|                 web.get("/store", api_store.store_info), | ||||
|                 web.get("/store/addons", api_store.addons_list), | ||||
|                 web.get("/store/addons/{addon}", api_store.addons_addon_info), | ||||
|                 web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon), | ||||
|                 web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo), | ||||
|                 web.get( | ||||
| @@ -616,6 +718,8 @@ class RestAPI(CoreSysAttributes): | ||||
|                     "/store/addons/{addon}/update/{version}", | ||||
|                     api_store.addons_addon_update, | ||||
|                 ), | ||||
|                 # Must be below others since it has a wildcard in resource path | ||||
|                 web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info), | ||||
|                 web.post("/store/reload", api_store.reload), | ||||
|                 web.get("/store/repositories", api_store.repositories_list), | ||||
|                 web.get( | ||||
| @@ -667,9 +771,7 @@ class RestAPI(CoreSysAttributes): | ||||
|     async def start(self) -> None: | ||||
|         """Run RESTful API webserver.""" | ||||
|         await self._runner.setup() | ||||
|         self._site = web.TCPSite( | ||||
|             self._runner, host="0.0.0.0", port=80, shutdown_timeout=5 | ||||
|         ) | ||||
|         self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80) | ||||
|  | ||||
|         try: | ||||
|             await self._site.start() | ||||
|   | ||||
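The ordering comment added in the store registration above ("Must be below others since it has a wildcard in resource path") is load-bearing: aiohttp tries dynamic routes in registration order, so a catch-all `{version}` segment registered too early would shadow its literal siblings. A minimal standalone sketch (not Supervisor code) of the effect:

```python
# Minimal aiohttp sketch (illustrative only): the "{version}" wildcard must be
# registered after the literal "icon" route, or it would match "icon" as a
# version and shadow that endpoint.
from aiohttp import web


async def addon_icon(request: web.Request) -> web.Response:
    return web.Response(text=f"icon for {request.match_info['addon']}")


async def addon_info(request: web.Request) -> web.Response:
    info = request.match_info
    return web.Response(text=f"{info['addon']} at version {info['version']}")


app = web.Application()
app.add_routes(
    [
        web.get("/store/addons/{addon}/icon", addon_icon),
        # Registered last: "{version}" matches any path segment, "icon" included.
        web.get("/store/addons/{addon}/{version}", addon_info),
    ]
)

if __name__ == "__main__":
    web.run_app(app)
```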
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Home Assistant RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| @@ -8,8 +9,8 @@ from aiohttp import web | ||||
| import voluptuous as vol | ||||
| from voluptuous.humanize import humanize_error | ||||
|  | ||||
| from ..addons import AnyAddon | ||||
| from ..addons.addon import Addon | ||||
| from ..addons.manager import AnyAddon | ||||
| from ..addons.utils import rating_security | ||||
| from ..const import ( | ||||
|     ATTR_ADDONS, | ||||
| @@ -81,6 +82,8 @@ from ..const import ( | ||||
|     ATTR_STARTUP, | ||||
|     ATTR_STATE, | ||||
|     ATTR_STDIN, | ||||
|     ATTR_SYSTEM_MANAGED, | ||||
|     ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, | ||||
|     ATTR_TRANSLATIONS, | ||||
|     ATTR_UART, | ||||
|     ATTR_UDEV, | ||||
| @@ -95,6 +98,7 @@ from ..const import ( | ||||
|     ATTR_WEBUI, | ||||
|     REQUEST_FROM, | ||||
|     AddonBoot, | ||||
|     AddonBootConfig, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..docker.stats import DockerStats | ||||
| @@ -106,8 +110,8 @@ from ..exceptions import ( | ||||
|     PwnedSecret, | ||||
| ) | ||||
| from ..validate import docker_ports | ||||
| from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY | ||||
| from .utils import api_process, api_process_raw, api_validate, json_loads | ||||
| from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED | ||||
| from .utils import api_process, api_validate, json_loads | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -126,15 +130,26 @@ SCHEMA_OPTIONS = vol.Schema( | ||||
|     } | ||||
| ) | ||||
|  | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_SYS_OPTIONS = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(), | ||||
|         vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()}) | ||||
|  | ||||
| SCHEMA_UNINSTALL = vol.Schema( | ||||
|     {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()} | ||||
| ) | ||||
| # pylint: enable=no-value-for-parameter | ||||
|  | ||||
|  | ||||
| class APIAddons(CoreSysAttributes): | ||||
|     """Handle RESTful API for add-on functions.""" | ||||
|  | ||||
|     def _extract_addon(self, request: web.Request) -> Addon: | ||||
|         """Return addon, throw an exception it it doesn't exist.""" | ||||
|     def get_addon_for_request(self, request: web.Request) -> Addon: | ||||
|         """Return addon, throw an exception if it doesn't exist.""" | ||||
|         addon_slug: str = request.match_info.get("addon") | ||||
|  | ||||
|         # Lookup itself | ||||
| @@ -174,6 +189,7 @@ class APIAddons(CoreSysAttributes): | ||||
|                 ATTR_URL: addon.url, | ||||
|                 ATTR_ICON: addon.with_icon, | ||||
|                 ATTR_LOGO: addon.with_logo, | ||||
|                 ATTR_SYSTEM_MANAGED: addon.system_managed, | ||||
|             } | ||||
|             for addon in self.sys_addons.installed | ||||
|         ] | ||||
| @@ -187,7 +203,7 @@ class APIAddons(CoreSysAttributes): | ||||
|  | ||||
|     async def info(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Return add-on information.""" | ||||
|         addon: AnyAddon = self._extract_addon(request) | ||||
|         addon: AnyAddon = self.get_addon_for_request(request) | ||||
|  | ||||
|         data = { | ||||
|             ATTR_NAME: addon.name, | ||||
| @@ -202,6 +218,7 @@ class APIAddons(CoreSysAttributes): | ||||
|             ATTR_VERSION_LATEST: addon.latest_version, | ||||
|             ATTR_PROTECTED: addon.protected, | ||||
|             ATTR_RATING: rating_security(addon), | ||||
|             ATTR_BOOT_CONFIG: addon.boot_config, | ||||
|             ATTR_BOOT: addon.boot, | ||||
|             ATTR_OPTIONS: addon.options, | ||||
|             ATTR_SCHEMA: addon.schema_ui, | ||||
| @@ -261,6 +278,8 @@ class APIAddons(CoreSysAttributes): | ||||
|             ATTR_WATCHDOG: addon.watchdog, | ||||
|             ATTR_DEVICES: addon.static_devices | ||||
|             + [device.path for device in addon.devices], | ||||
|             ATTR_SYSTEM_MANAGED: addon.system_managed, | ||||
|             ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry, | ||||
|         } | ||||
|  | ||||
|         return data | ||||
| @@ -268,7 +287,7 @@ class APIAddons(CoreSysAttributes): | ||||
|     @api_process | ||||
|     async def options(self, request: web.Request) -> None: | ||||
|         """Store user options for add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|  | ||||
|         # Update secrets for validation | ||||
|         await self.sys_homeassistant.secrets.reload() | ||||
| @@ -283,6 +302,10 @@ class APIAddons(CoreSysAttributes): | ||||
|         if ATTR_OPTIONS in body: | ||||
|             addon.options = body[ATTR_OPTIONS] | ||||
|         if ATTR_BOOT in body: | ||||
|             if addon.boot_config == AddonBootConfig.MANUAL_ONLY: | ||||
|                 raise APIError( | ||||
|                     f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed" | ||||
|                 ) | ||||
|             addon.boot = body[ATTR_BOOT] | ||||
|         if ATTR_AUTO_UPDATE in body: | ||||
|             addon.auto_update = body[ATTR_AUTO_UPDATE] | ||||
| @@ -300,10 +323,24 @@ class APIAddons(CoreSysAttributes): | ||||
|  | ||||
|         addon.save_persist() | ||||
|  | ||||
|     @api_process | ||||
|     async def sys_options(self, request: web.Request) -> None: | ||||
|         """Store system options for an add-on.""" | ||||
|         addon = self.get_addon_for_request(request) | ||||
|  | ||||
|         # Validate/Process Body | ||||
|         body = await api_validate(SCHEMA_SYS_OPTIONS, request) | ||||
|         if ATTR_SYSTEM_MANAGED in body: | ||||
|             addon.system_managed = body[ATTR_SYSTEM_MANAGED] | ||||
|         if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body: | ||||
|             addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] | ||||
|  | ||||
|         addon.save_persist() | ||||
|  | ||||
|     @api_process | ||||
|     async def options_validate(self, request: web.Request) -> None: | ||||
|         """Validate user options for add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False} | ||||
|  | ||||
|         options = await request.json(loads=json_loads) or addon.options | ||||
| @@ -345,7 +382,7 @@ class APIAddons(CoreSysAttributes): | ||||
|         slug: str = request.match_info.get("addon") | ||||
|         if slug != "self": | ||||
|             raise APIForbidden("This can only be read by the Add-on itself!") | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|  | ||||
|         # Lookup/reload secrets | ||||
|         await self.sys_homeassistant.secrets.reload() | ||||
| @@ -357,7 +394,7 @@ class APIAddons(CoreSysAttributes): | ||||
|     @api_process | ||||
|     async def security(self, request: web.Request) -> None: | ||||
|         """Store security options for add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request) | ||||
|  | ||||
|         if ATTR_PROTECTED in body: | ||||
| @@ -369,7 +406,7 @@ class APIAddons(CoreSysAttributes): | ||||
|     @api_process | ||||
|     async def stats(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Return resource information.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|  | ||||
|         stats: DockerStats = await addon.stats() | ||||
|  | ||||
| @@ -385,48 +422,47 @@ class APIAddons(CoreSysAttributes): | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
|     def uninstall(self, request: web.Request) -> Awaitable[None]: | ||||
|     async def uninstall(self, request: web.Request) -> None: | ||||
|         """Uninstall add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         return asyncio.shield(addon.uninstall()) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request) | ||||
|         return await asyncio.shield( | ||||
|             self.sys_addons.uninstall( | ||||
|                 addon.slug, remove_config=body[ATTR_REMOVE_CONFIG] | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     async def start(self, request: web.Request) -> None: | ||||
|         """Start add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         if start_task := await asyncio.shield(addon.start()): | ||||
|             await start_task | ||||
|  | ||||
|     @api_process | ||||
|     def stop(self, request: web.Request) -> Awaitable[None]: | ||||
|         """Stop add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         return asyncio.shield(addon.stop()) | ||||
|  | ||||
|     @api_process | ||||
|     async def restart(self, request: web.Request) -> None: | ||||
|         """Restart add-on.""" | ||||
|         addon: Addon = self._extract_addon(request) | ||||
|         addon: Addon = self.get_addon_for_request(request) | ||||
|         if start_task := await asyncio.shield(addon.restart()): | ||||
|             await start_task | ||||
|  | ||||
|     @api_process | ||||
|     async def rebuild(self, request: web.Request) -> None: | ||||
|         """Rebuild local build add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         if start_task := await asyncio.shield(addon.rebuild()): | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)): | ||||
|             await start_task | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_BINARY) | ||||
|     def logs(self, request: web.Request) -> Awaitable[bytes]: | ||||
|         """Return logs from add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         return addon.logs() | ||||
|  | ||||
|     @api_process | ||||
|     async def stdin(self, request: web.Request) -> None: | ||||
|         """Write to stdin of add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         addon = self.get_addon_for_request(request) | ||||
|         if not addon.with_stdin: | ||||
|             raise APIError(f"STDIN not supported the {addon.slug} add-on") | ||||
|  | ||||
|   | ||||
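For context, here is a hedged client-side sketch of the new uninstall payload validated by SCHEMA_UNINSTALL above; the endpoint path and bearer-token handling follow the usual Supervisor add-on API conventions, and the slug is just an example:

```python
# Hypothetical client call for the new remove_config option; the route and
# token handling are assumptions based on the standard Supervisor API, not
# part of the diff above.
import asyncio
import os

import aiohttp


async def uninstall_addon(slug: str, remove_config: bool = False) -> None:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            f"http://supervisor/addons/{slug}/uninstall",
            json={"remove_config": remove_config},  # validated by SCHEMA_UNINSTALL
        ) as resp:
            resp.raise_for_status()


asyncio.run(uninstall_addon("core_mosquitto", remove_config=True))
```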
| @@ -1,11 +1,12 @@ | ||||
| """Init file for Supervisor Audio RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from dataclasses import asdict | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from aiohttp import web | ||||
| import attr | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ..const import ( | ||||
| @@ -35,8 +36,7 @@ from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError | ||||
| from ..host.sound import StreamType | ||||
| from ..validate import version_tag | ||||
| from .const import CONTENT_TYPE_BINARY | ||||
| from .utils import api_process, api_process_raw, api_validate | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -76,15 +76,11 @@ class APIAudio(CoreSysAttributes): | ||||
|             ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update, | ||||
|             ATTR_HOST: str(self.sys_docker.network.audio), | ||||
|             ATTR_AUDIO: { | ||||
|                 ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards], | ||||
|                 ATTR_INPUT: [ | ||||
|                     attr.asdict(stream) for stream in self.sys_host.sound.inputs | ||||
|                 ], | ||||
|                 ATTR_OUTPUT: [ | ||||
|                     attr.asdict(stream) for stream in self.sys_host.sound.outputs | ||||
|                 ], | ||||
|                 ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards], | ||||
|                 ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs], | ||||
|                 ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs], | ||||
|                 ATTR_APPLICATION: [ | ||||
|                     attr.asdict(stream) for stream in self.sys_host.sound.applications | ||||
|                     asdict(stream) for stream in self.sys_host.sound.applications | ||||
|                 ], | ||||
|             }, | ||||
|         } | ||||
| @@ -115,11 +111,6 @@ class APIAudio(CoreSysAttributes): | ||||
|             raise APIError(f"Version {version} is already in use") | ||||
|         await asyncio.shield(self.sys_plugins.audio.update(version)) | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_BINARY) | ||||
|     def logs(self, request: web.Request) -> Awaitable[bytes]: | ||||
|         """Return Audio Docker logs.""" | ||||
|         return self.sys_plugins.audio.logs() | ||||
|  | ||||
|     @api_process | ||||
|     def restart(self, request: web.Request) -> Awaitable[None]: | ||||
|         """Restart Audio plugin.""" | ||||
|   | ||||
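The audio diff swaps attr.asdict for the standard-library dataclasses.asdict, which recurses into nested dataclasses the same way. A small self-contained sketch; the card/stream classes here are invented for illustration, not the Supervisor's real sound models:

```python
# Sketch of the attrs -> dataclasses change; AudioCard/AudioStream are
# illustrative stand-ins only.
from dataclasses import asdict, dataclass


@dataclass
class AudioStream:
    name: str
    volume: float


@dataclass
class AudioCard:
    index: int
    streams: list[AudioStream]


card = AudioCard(index=0, streams=[AudioStream("Built-in Audio", 0.8)])
print(asdict(card))
# {'index': 0, 'streams': [{'name': 'Built-in Audio', 'volume': 0.8}]}
```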
| @@ -1,6 +1,8 @@ | ||||
| """Init file for Supervisor auth/SSO RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from aiohttp import BasicAuth, web | ||||
| from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE | ||||
| @@ -8,10 +10,19 @@ from aiohttp.web_exceptions import HTTPUnauthorized | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ..addons.addon import Addon | ||||
| from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM | ||||
| from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIForbidden | ||||
| from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL | ||||
| from ..utils.json import json_loads | ||||
| from .const import ( | ||||
|     ATTR_GROUP_IDS, | ||||
|     ATTR_IS_ACTIVE, | ||||
|     ATTR_IS_OWNER, | ||||
|     ATTR_LOCAL_ONLY, | ||||
|     ATTR_USERS, | ||||
|     CONTENT_TYPE_JSON, | ||||
|     CONTENT_TYPE_URL, | ||||
| ) | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
| @@ -67,7 +78,7 @@ class APIAuth(CoreSysAttributes): | ||||
|  | ||||
|         # Json | ||||
|         if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON: | ||||
|             data = await request.json() | ||||
|             data = await request.json(loads=json_loads) | ||||
|             return await self._process_dict(request, addon, data) | ||||
|  | ||||
|         # URL encoded | ||||
| @@ -89,3 +100,21 @@ class APIAuth(CoreSysAttributes): | ||||
|     async def cache(self, request: web.Request) -> None: | ||||
|         """Process cache reset request.""" | ||||
|         self.sys_auth.reset_data() | ||||
|  | ||||
|     @api_process | ||||
|     async def list_users( | ||||
|         self, request: web.Request | ||||
|     ) -> dict[str, list[dict[str, Any]]]: | ||||
|         """List users on the Home Assistant instance.""" | ||||
|         return { | ||||
|             ATTR_USERS: [ | ||||
|                 { | ||||
|                     ATTR_USERNAME: user[ATTR_USERNAME], | ||||
|                     ATTR_NAME: user[ATTR_NAME], | ||||
|                     ATTR_IS_OWNER: user[ATTR_IS_OWNER], | ||||
|                     ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE], | ||||
|                     ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY], | ||||
|                     ATTR_GROUP_IDS: user[ATTR_GROUP_IDS], | ||||
|                 } | ||||
|                 for user in await self.sys_auth.list_users() | ||||
|                 if user[ATTR_USERNAME] | ||||
|             ] | ||||
|         } | ||||
|   | ||||
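The `if user[ATTR_USERNAME]` guard in list_users drops accounts without a username before they reach the API response. A minimal sketch of that filter, using invented sample data:

```python
# Invented sample data; shows only the filtering behaviour of list_users.
from typing import Any

raw_users: list[dict[str, Any]] = [
    {"username": "anna", "name": "Anna", "is_owner": True,
     "is_active": True, "local_only": False, "group_ids": ["system-admin"]},
    {"username": None, "name": "System account", "is_owner": False,
     "is_active": True, "local_only": False, "group_ids": []},
]

visible = [user for user in raw_users if user["username"]]
assert [user["name"] for user in visible] == ["Anna"]
```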
| @@ -1,5 +1,8 @@ | ||||
| """Backups RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Callable | ||||
| import errno | ||||
| import logging | ||||
| from pathlib import Path | ||||
| import re | ||||
| @@ -10,6 +13,7 @@ from aiohttp import web | ||||
| from aiohttp.hdrs import CONTENT_DISPOSITION | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ..backups.backup import Backup | ||||
| from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale | ||||
| from ..const import ( | ||||
|     ATTR_ADDONS, | ||||
| @@ -20,6 +24,7 @@ from ..const import ( | ||||
|     ATTR_DAYS_UNTIL_STALE, | ||||
|     ATTR_FOLDERS, | ||||
|     ATTR_HOMEASSISTANT, | ||||
|     ATTR_HOMEASSISTANT_EXCLUDE_DATABASE, | ||||
|     ATTR_LOCATON, | ||||
|     ATTR_NAME, | ||||
|     ATTR_PASSWORD, | ||||
| @@ -28,13 +33,18 @@ from ..const import ( | ||||
|     ATTR_SIZE, | ||||
|     ATTR_SLUG, | ||||
|     ATTR_SUPERVISOR_VERSION, | ||||
|     ATTR_TIMEOUT, | ||||
|     ATTR_TYPE, | ||||
|     ATTR_VERSION, | ||||
|     BusEvent, | ||||
|     CoreState, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError | ||||
| from ..jobs import JobSchedulerOptions | ||||
| from ..mounts.const import MountUsage | ||||
| from .const import CONTENT_TYPE_TAR | ||||
| from ..resolution.const import UnhealthyReason | ||||
| from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
| @@ -46,23 +56,29 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+") | ||||
| _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT] | ||||
|  | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_RESTORE_PARTIAL = vol.Schema( | ||||
| SCHEMA_RESTORE_FULL = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_PASSWORD): vol.Maybe(str), | ||||
|         vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend( | ||||
|     { | ||||
|         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), | ||||
|         vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()), | ||||
|         vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)}) | ||||
|  | ||||
| SCHEMA_BACKUP_FULL = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_NAME): str, | ||||
|         vol.Optional(ATTR_PASSWORD): vol.Maybe(str), | ||||
|         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()), | ||||
|         vol.Optional(ATTR_LOCATON): vol.Maybe(str), | ||||
|         vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(), | ||||
|         vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| @@ -80,6 +96,12 @@ SCHEMA_OPTIONS = vol.Schema( | ||||
|     } | ||||
| ) | ||||
|  | ||||
| SCHEMA_FREEZE = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)), | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| class APIBackups(CoreSysAttributes): | ||||
|     """Handle RESTful API for backups functions.""" | ||||
| @@ -142,7 +164,7 @@ class APIBackups(CoreSysAttributes): | ||||
|         self.sys_backups.save_data() | ||||
|  | ||||
|     @api_process | ||||
|     async def reload(self, request): | ||||
|     async def reload(self, _): | ||||
|         """Reload backup list.""" | ||||
|         await asyncio.shield(self.sys_backups.reload()) | ||||
|         return True | ||||
| @@ -177,6 +199,7 @@ class APIBackups(CoreSysAttributes): | ||||
|             ATTR_ADDONS: data_addons, | ||||
|             ATTR_REPOSITORIES: backup.repositories, | ||||
|             ATTR_FOLDERS: backup.folders, | ||||
|             ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database, | ||||
|         } | ||||
|  | ||||
|     def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]: | ||||
| @@ -192,46 +215,120 @@ class APIBackups(CoreSysAttributes): | ||||
|  | ||||
|         return body | ||||
|  | ||||
|     async def _background_backup_task( | ||||
|         self, backup_method: Callable, *args, **kwargs | ||||
|     ) -> tuple[asyncio.Task, str]: | ||||
|         """Start backup task in  background and return task and job ID.""" | ||||
|         event = asyncio.Event() | ||||
|         job, backup_task = self.sys_jobs.schedule_job( | ||||
|             backup_method, JobSchedulerOptions(), *args, **kwargs | ||||
|         ) | ||||
|  | ||||
|         async def release_on_freeze(new_state: CoreState): | ||||
|             if new_state == CoreState.FREEZE: | ||||
|                 event.set() | ||||
|  | ||||
|         # Wait for system to get into freeze state before returning | ||||
|         # If the backup fails validation it will raise before getting there | ||||
|         listener = self.sys_bus.register_event( | ||||
|             BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze | ||||
|         ) | ||||
|         try: | ||||
|             await asyncio.wait( | ||||
|                 ( | ||||
|                     backup_task, | ||||
|                     self.sys_create_task(event.wait()), | ||||
|                 ), | ||||
|                 return_when=asyncio.FIRST_COMPLETED, | ||||
|             ) | ||||
|             return (backup_task, job.uuid) | ||||
|         finally: | ||||
|             self.sys_bus.remove_listener(listener) | ||||
|  | ||||
|     @api_process | ||||
|     async def backup_full(self, request): | ||||
|         """Create full backup.""" | ||||
|         body = await api_validate(SCHEMA_BACKUP_FULL, request) | ||||
|  | ||||
|         backup = await asyncio.shield( | ||||
|             self.sys_backups.do_backup_full(**self._location_to_mount(body)) | ||||
|         background = body.pop(ATTR_BACKGROUND) | ||||
|         backup_task, job_id = await self._background_backup_task( | ||||
|             self.sys_backups.do_backup_full, **self._location_to_mount(body) | ||||
|         ) | ||||
|  | ||||
|         if background and not backup_task.done(): | ||||
|             return {ATTR_JOB_ID: job_id} | ||||
|  | ||||
|         backup: Backup = await backup_task | ||||
|         if backup: | ||||
|             return {ATTR_SLUG: backup.slug} | ||||
|         return False | ||||
|             return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug} | ||||
|         raise APIError( | ||||
|             f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details", | ||||
|             job_id=job_id, | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     async def backup_partial(self, request): | ||||
|         """Create a partial backup.""" | ||||
|         body = await api_validate(SCHEMA_BACKUP_PARTIAL, request) | ||||
|         backup = await asyncio.shield( | ||||
|             self.sys_backups.do_backup_partial(**self._location_to_mount(body)) | ||||
|         background = body.pop(ATTR_BACKGROUND) | ||||
|         backup_task, job_id = await self._background_backup_task( | ||||
|             self.sys_backups.do_backup_partial, **self._location_to_mount(body) | ||||
|         ) | ||||
|  | ||||
|         if background and not backup_task.done(): | ||||
|             return {ATTR_JOB_ID: job_id} | ||||
|  | ||||
|         backup: Backup = await backup_task | ||||
|         if backup: | ||||
|             return {ATTR_SLUG: backup.slug} | ||||
|         return False | ||||
|             return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug} | ||||
|         raise APIError( | ||||
|             f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details", | ||||
|             job_id=job_id, | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     async def restore_full(self, request): | ||||
|         """Full restore of a backup.""" | ||||
|         backup = self._extract_slug(request) | ||||
|         body = await api_validate(SCHEMA_RESTORE_FULL, request) | ||||
|         background = body.pop(ATTR_BACKGROUND) | ||||
|         restore_task, job_id = await self._background_backup_task( | ||||
|             self.sys_backups.do_restore_full, backup, **body | ||||
|         ) | ||||
|  | ||||
|         return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body)) | ||||
|         if (background and not restore_task.done()) or await restore_task: | ||||
|             return {ATTR_JOB_ID: job_id} | ||||
|         raise APIError( | ||||
|             f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details", | ||||
|             job_id=job_id, | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     async def restore_partial(self, request): | ||||
|         """Partial restore a backup.""" | ||||
|         backup = self._extract_slug(request) | ||||
|         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) | ||||
|         background = body.pop(ATTR_BACKGROUND) | ||||
|         restore_task, job_id = await self._background_backup_task( | ||||
|             self.sys_backups.do_restore_partial, backup, **body | ||||
|         ) | ||||
|  | ||||
|         return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body)) | ||||
|         if (background and not restore_task.done()) or await restore_task: | ||||
|             return {ATTR_JOB_ID: job_id} | ||||
|         raise APIError( | ||||
|             f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details", | ||||
|             job_id=job_id, | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     async def freeze(self, request): | ||||
|         """Initiate manual freeze for external backup.""" | ||||
|         body = await api_validate(SCHEMA_FREEZE, request) | ||||
|         await asyncio.shield(self.sys_backups.freeze_all(**body)) | ||||
|  | ||||
|     @api_process | ||||
|     async def thaw(self, request): | ||||
|         """Begin thaw after manual freeze.""" | ||||
|         await self.sys_backups.thaw_all() | ||||
|  | ||||
|     @api_process | ||||
|     async def remove(self, request): | ||||
| @@ -246,9 +343,9 @@ class APIBackups(CoreSysAttributes): | ||||
|         _LOGGER.info("Downloading backup %s", backup.slug) | ||||
|         response = web.FileResponse(backup.tarfile) | ||||
|         response.content_type = CONTENT_TYPE_TAR | ||||
|         response.headers[ | ||||
|             CONTENT_DISPOSITION | ||||
|         ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar" | ||||
|         response.headers[CONTENT_DISPOSITION] = ( | ||||
|             f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar" | ||||
|         ) | ||||
|         return response | ||||
|  | ||||
|     @api_process | ||||
| @@ -267,6 +364,8 @@ class APIBackups(CoreSysAttributes): | ||||
|                         backup.write(chunk) | ||||
|  | ||||
|             except OSError as err: | ||||
|                 if err.errno == errno.EBADMSG: | ||||
|                     self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||
|                 _LOGGER.error("Can't write new backup file: %s", err) | ||||
|                 return False | ||||
|  | ||||
|   | ||||
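The new `background` flag changes the response contract: the handler returns as soon as the system reaches the freeze state (or the task finishes first), handing back a job_id instead of blocking until the backup completes. A hedged client sketch; the /jobs polling endpoint, the `{"data": ...}` envelope, and the job `done` field are assumptions based on the Supervisor jobs API, not part of this diff:

```python
# Hypothetical client flow for a background full backup.
import asyncio
import os

import aiohttp


async def backup_in_background() -> str:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            "http://supervisor/backups/new/full",
            json={"name": "nightly", "background": True},
        ) as resp:
            resp.raise_for_status()
            job_id = (await resp.json())["data"]["job_id"]

        # Poll the job until the backup finishes (job shape assumed).
        while True:
            async with session.get(f"http://supervisor/jobs/{job_id}") as resp:
                job = (await resp.json())["data"]
            if job.get("done"):
                return job_id
            await asyncio.sleep(5)


asyncio.run(backup_in_background())
```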
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor HA cli RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| import logging | ||||
| from typing import Any | ||||
|   | ||||
| @@ -1,11 +1,14 @@ | ||||
| """Const for API.""" | ||||
|  | ||||
| from enum import StrEnum | ||||
|  | ||||
| CONTENT_TYPE_BINARY = "application/octet-stream" | ||||
| CONTENT_TYPE_JSON = "application/json" | ||||
| CONTENT_TYPE_PNG = "image/png" | ||||
| CONTENT_TYPE_TAR = "application/tar" | ||||
| CONTENT_TYPE_TEXT = "text/plain" | ||||
| CONTENT_TYPE_URL = "application/x-www-form-urlencoded" | ||||
| CONTENT_TYPE_X_LOG = "text/x-log" | ||||
|  | ||||
| COOKIE_INGRESS = "ingress_session" | ||||
|  | ||||
| @@ -13,6 +16,10 @@ ATTR_AGENT_VERSION = "agent_version" | ||||
| ATTR_APPARMOR_VERSION = "apparmor_version" | ||||
| ATTR_ATTRIBUTES = "attributes" | ||||
| ATTR_AVAILABLE_UPDATES = "available_updates" | ||||
| ATTR_BACKGROUND = "background" | ||||
| ATTR_BOOT_CONFIG = "boot_config" | ||||
| ATTR_BOOT_SLOT = "boot_slot" | ||||
| ATTR_BOOT_SLOTS = "boot_slots" | ||||
| ATTR_BOOT_TIMESTAMP = "boot_timestamp" | ||||
| ATTR_BOOTS = "boots" | ||||
| ATTR_BROADCAST_LLMNR = "broadcast_llmnr" | ||||
| @@ -23,7 +30,6 @@ ATTR_CONNECTION_BUS = "connection_bus" | ||||
| ATTR_DATA_DISK = "data_disk" | ||||
| ATTR_DEVICE = "device" | ||||
| ATTR_DEV_PATH = "dev_path" | ||||
| ATTR_DISK_LED = "disk_led" | ||||
| ATTR_DISKS = "disks" | ||||
| ATTR_DRIVES = "drives" | ||||
| ATTR_DT_SYNCHRONIZED = "dt_synchronized" | ||||
| @@ -31,25 +37,43 @@ ATTR_DT_UTC = "dt_utc" | ||||
| ATTR_EJECTABLE = "ejectable" | ||||
| ATTR_FALLBACK = "fallback" | ||||
| ATTR_FILESYSTEMS = "filesystems" | ||||
| ATTR_HEARTBEAT_LED = "heartbeat_led" | ||||
| ATTR_FORCE = "force" | ||||
| ATTR_GROUP_IDS = "group_ids" | ||||
| ATTR_IDENTIFIERS = "identifiers" | ||||
| ATTR_IS_ACTIVE = "is_active" | ||||
| ATTR_IS_OWNER = "is_owner" | ||||
| ATTR_JOB_ID = "job_id" | ||||
| ATTR_JOBS = "jobs" | ||||
| ATTR_LLMNR = "llmnr" | ||||
| ATTR_LLMNR_HOSTNAME = "llmnr_hostname" | ||||
| ATTR_LOCAL_ONLY = "local_only" | ||||
| ATTR_MDNS = "mdns" | ||||
| ATTR_MODEL = "model" | ||||
| ATTR_MOUNTS = "mounts" | ||||
| ATTR_MOUNT_POINTS = "mount_points" | ||||
| ATTR_PANEL_PATH = "panel_path" | ||||
| ATTR_POWER_LED = "power_led" | ||||
| ATTR_REMOVABLE = "removable" | ||||
| ATTR_REMOVE_CONFIG = "remove_config" | ||||
| ATTR_REVISION = "revision" | ||||
| ATTR_SAFE_MODE = "safe_mode" | ||||
| ATTR_SEAT = "seat" | ||||
| ATTR_SIGNED = "signed" | ||||
| ATTR_STARTUP_TIME = "startup_time" | ||||
| ATTR_STATUS = "status" | ||||
| ATTR_SUBSYSTEM = "subsystem" | ||||
| ATTR_SYSFS = "sysfs" | ||||
| ATTR_SYSTEM_HEALTH_LED = "system_health_led" | ||||
| ATTR_TIME_DETECTED = "time_detected" | ||||
| ATTR_UPDATE_TYPE = "update_type" | ||||
| ATTR_USE_NTP = "use_ntp" | ||||
| ATTR_USAGE = "usage" | ||||
| ATTR_USE_NTP = "use_ntp" | ||||
| ATTR_USERS = "users" | ||||
| ATTR_VENDOR = "vendor" | ||||
| ATTR_VIRTUALIZATION = "virtualization" | ||||
|  | ||||
|  | ||||
| class BootSlot(StrEnum): | ||||
|     """Boot slots used by HAOS.""" | ||||
|  | ||||
|     A = "A" | ||||
|     B = "B" | ||||
|   | ||||
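StrEnum (Python 3.11+) is a good fit for BootSlot because members compare equal to, and format as, their raw string values, so they pass straight through JSON payloads. A short demonstration:

```python
# Behaviour of the BootSlot StrEnum above (requires Python 3.11+).
from enum import StrEnum


class BootSlot(StrEnum):
    """Boot slots used by HAOS."""

    A = "A"
    B = "B"


assert BootSlot("A") is BootSlot.A  # lookup by the raw API value
assert BootSlot.B == "B"            # compares equal to plain strings
print(f"boot slot {BootSlot.A}")    # formats as "boot slot A"
```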
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor network RESTful API.""" | ||||
|  | ||||
| import logging | ||||
|  | ||||
| import voluptuous as vol | ||||
| @@ -15,7 +16,6 @@ from ..const import ( | ||||
|     AddonState, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..discovery.validate import valid_discovery_service | ||||
| from ..exceptions import APIError, APIForbidden | ||||
| from .utils import api_process, api_validate, require_home_assistant | ||||
|  | ||||
| @@ -24,7 +24,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
| SCHEMA_DISCOVERY = vol.Schema( | ||||
|     { | ||||
|         vol.Required(ATTR_SERVICE): str, | ||||
|         vol.Optional(ATTR_CONFIG): vol.Maybe(dict), | ||||
|         vol.Required(ATTR_CONFIG): dict, | ||||
|     } | ||||
| ) | ||||
|  | ||||
| @@ -71,15 +71,6 @@ class APIDiscovery(CoreSysAttributes): | ||||
|         addon: Addon = request[REQUEST_FROM] | ||||
|         service = body[ATTR_SERVICE] | ||||
|  | ||||
|         try: | ||||
|             valid_discovery_service(service) | ||||
|         except vol.Invalid: | ||||
|             _LOGGER.warning( | ||||
|                 "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on", | ||||
|                 service, | ||||
|                 addon.name, | ||||
|             ) | ||||
|  | ||||
|         # Access? | ||||
|         if body[ATTR_SERVICE] not in addon.discovery: | ||||
|             _LOGGER.error( | ||||
|   | ||||
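The schema change above tightens the discovery contract: `config` goes from an optional, nullable field to a required dict, so a missing or null config is now rejected at validation time instead of reaching the handler. A short voluptuous sketch of the difference:

```python
# Standalone sketch of the tightened discovery schema.
import voluptuous as vol

SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required("service"): str,
        vol.Required("config"): dict,
    }
)

SCHEMA_DISCOVERY({"service": "mqtt", "config": {"host": "core-mosquitto"}})  # passes

try:
    SCHEMA_DISCOVERY({"service": "mqtt"})  # previously allowed, now rejected
except vol.Invalid as err:
    print(f"rejected: {err}")
```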
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor DNS RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| @@ -26,8 +27,8 @@ from ..const import ( | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError | ||||
| from ..validate import dns_server_list, version_tag | ||||
| from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY | ||||
| from .utils import api_process, api_process_raw, api_validate | ||||
| from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -105,11 +106,6 @@ class APICoreDNS(CoreSysAttributes): | ||||
|             raise APIError(f"Version {version} is already in use") | ||||
|         await asyncio.shield(self.sys_plugins.dns.update(version)) | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_BINARY) | ||||
|     def logs(self, request: web.Request) -> Awaitable[bytes]: | ||||
|         """Return DNS Docker logs.""" | ||||
|         return self.sys_plugins.dns.logs() | ||||
|  | ||||
|     @api_process | ||||
|     def restart(self, request: web.Request) -> Awaitable[None]: | ||||
|         """Restart CoreDNS plugin.""" | ||||
|   | ||||
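With the plugin-level `logs` handlers removed, both DNS and audio route their log endpoints through `_register_advanced_logs` (see the registrations earlier in the diff), pointing them at the host journal under a fixed syslog identifier. A hedged sketch of how such a helper could be wired; the signatures and the toy handler are assumptions, not the actual Supervisor helper:

```python
# Hypothetical wiring of per-plugin journal log routes; the real
# _register_advanced_logs and APIHost.advanced_logs differ in detail.
from functools import partial

from aiohttp import web


async def advanced_logs(
    request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.Response:
    # Toy stand-in for the journald-backed host log handler.
    return web.Response(text=f"journal for {identifier}, follow={follow}")


def register_advanced_logs(app: web.Application, path: str, identifier: str) -> None:
    app.add_routes(
        [
            web.get(f"{path}/logs", partial(advanced_logs, identifier=identifier)),
            web.get(
                f"{path}/logs/follow",
                partial(advanced_logs, identifier=identifier, follow=True),
            ),
        ]
    )


app = web.Application()
register_advanced_logs(app, "/dns", "hassio_dns")
```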
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Home Assistant RESTful API.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor hardware RESTful API.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| @@ -16,7 +17,7 @@ from ..const import ( | ||||
|     ATTR_SYSTEM, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..dbus.udisks2 import UDisks2 | ||||
| from ..dbus.udisks2 import UDisks2Manager | ||||
| from ..dbus.udisks2.block import UDisks2Block | ||||
| from ..dbus.udisks2.drive import UDisks2Drive | ||||
| from ..hardware.data import Device | ||||
| @@ -72,7 +73,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]: | ||||
|     } | ||||
|  | ||||
|  | ||||
| def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]: | ||||
| def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]: | ||||
|     """Return a dict with information of a disk to be used in the API.""" | ||||
|     return { | ||||
|         ATTR_VENDOR: drive.vendor, | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Home Assistant RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| @@ -12,6 +13,7 @@ from ..const import ( | ||||
|     ATTR_AUDIO_INPUT, | ||||
|     ATTR_AUDIO_OUTPUT, | ||||
|     ATTR_BACKUP, | ||||
|     ATTR_BACKUPS_EXCLUDE_DATABASE, | ||||
|     ATTR_BLK_READ, | ||||
|     ATTR_BLK_WRITE, | ||||
|     ATTR_BOOT, | ||||
| @@ -33,10 +35,10 @@ from ..const import ( | ||||
|     ATTR_WATCHDOG, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError | ||||
| from ..exceptions import APIDBMigrationInProgress, APIError | ||||
| from ..validate import docker_image, network_port, version_tag | ||||
| from .const import CONTENT_TYPE_BINARY | ||||
| from .utils import api_process, api_process_raw, api_validate | ||||
| from .const import ATTR_FORCE, ATTR_SAFE_MODE | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -51,6 +53,7 @@ SCHEMA_OPTIONS = vol.Schema( | ||||
|         vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str), | ||||
|         vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str), | ||||
|         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str), | ||||
|         vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| @@ -61,10 +64,34 @@ SCHEMA_UPDATE = vol.Schema( | ||||
|     } | ||||
| ) | ||||
|  | ||||
| SCHEMA_RESTART = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(), | ||||
|         vol.Optional(ATTR_FORCE, default=False): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| SCHEMA_STOP = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_FORCE, default=False): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| class APIHomeAssistant(CoreSysAttributes): | ||||
|     """Handle RESTful API for Home Assistant functions.""" | ||||
|  | ||||
|     async def _check_offline_migration(self, force: bool = False) -> None: | ||||
|         """Check and raise if there's an offline DB migration in progress.""" | ||||
|         if ( | ||||
|             not force | ||||
|             and (state := await self.sys_homeassistant.api.get_api_state()) | ||||
|             and state.offline_db_migration | ||||
|         ): | ||||
|             raise APIDBMigrationInProgress( | ||||
|                 "Offline database migration in progress, try again after it has completed" | ||||
|             ) | ||||
|  | ||||
|     @api_process | ||||
|     async def info(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Return host information.""" | ||||
| @@ -82,6 +109,7 @@ class APIHomeAssistant(CoreSysAttributes): | ||||
|             ATTR_WATCHDOG: self.sys_homeassistant.watchdog, | ||||
|             ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input, | ||||
|             ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output, | ||||
|             ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database, | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
| @@ -91,6 +119,9 @@ class APIHomeAssistant(CoreSysAttributes): | ||||
|  | ||||
|         if ATTR_IMAGE in body: | ||||
|             self.sys_homeassistant.image = body[ATTR_IMAGE] | ||||
|             self.sys_homeassistant.override_image = ( | ||||
|                 self.sys_homeassistant.image != self.sys_homeassistant.default_image | ||||
|             ) | ||||
|  | ||||
|         if ATTR_BOOT in body: | ||||
|             self.sys_homeassistant.boot = body[ATTR_BOOT] | ||||
| @@ -113,6 +144,11 @@ class APIHomeAssistant(CoreSysAttributes): | ||||
|         if ATTR_AUDIO_OUTPUT in body: | ||||
|             self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT] | ||||
|  | ||||
|         if ATTR_BACKUPS_EXCLUDE_DATABASE in body: | ||||
|             self.sys_homeassistant.backups_exclude_database = body[ | ||||
|                 ATTR_BACKUPS_EXCLUDE_DATABASE | ||||
|             ] | ||||
|  | ||||
|         self.sys_homeassistant.save_data() | ||||
|  | ||||
|     @api_process | ||||
| @@ -137,6 +173,7 @@ class APIHomeAssistant(CoreSysAttributes): | ||||
|     async def update(self, request: web.Request) -> None: | ||||
|         """Update Home Assistant.""" | ||||
|         body = await api_validate(SCHEMA_UPDATE, request) | ||||
|         await self._check_offline_migration() | ||||
|  | ||||
|         await asyncio.shield( | ||||
|             self.sys_homeassistant.core.update( | ||||
| @@ -146,9 +183,12 @@ class APIHomeAssistant(CoreSysAttributes): | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     def stop(self, request: web.Request) -> Awaitable[None]: | ||||
|     async def stop(self, request: web.Request) -> None: | ||||
|         """Stop Home Assistant.""" | ||||
|         return asyncio.shield(self.sys_homeassistant.core.stop()) | ||||
|         body = await api_validate(SCHEMA_STOP, request) | ||||
|         await self._check_offline_migration(force=body[ATTR_FORCE]) | ||||
|  | ||||
|         return await asyncio.shield(self.sys_homeassistant.core.stop()) | ||||
|  | ||||
|     @api_process | ||||
|     def start(self, request: web.Request) -> Awaitable[None]: | ||||
| @@ -156,19 +196,24 @@ class APIHomeAssistant(CoreSysAttributes): | ||||
|         return asyncio.shield(self.sys_homeassistant.core.start()) | ||||
|  | ||||
|     @api_process | ||||
|     def restart(self, request: web.Request) -> Awaitable[None]: | ||||
|     async def restart(self, request: web.Request) -> None: | ||||
|         """Restart Home Assistant.""" | ||||
|         return asyncio.shield(self.sys_homeassistant.core.restart()) | ||||
|         body = await api_validate(SCHEMA_RESTART, request) | ||||
|         await self._check_offline_migration(force=body[ATTR_FORCE]) | ||||
|  | ||||
|         await asyncio.shield( | ||||
|             self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE]) | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     def rebuild(self, request: web.Request) -> Awaitable[None]: | ||||
|     async def rebuild(self, request: web.Request) -> None: | ||||
|         """Rebuild Home Assistant.""" | ||||
|         return asyncio.shield(self.sys_homeassistant.core.rebuild()) | ||||
|         body = await api_validate(SCHEMA_RESTART, request) | ||||
|         await self._check_offline_migration(force=body[ATTR_FORCE]) | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_BINARY) | ||||
|     def logs(self, request: web.Request) -> Awaitable[bytes]: | ||||
|         """Return Home Assistant Docker logs.""" | ||||
|         return self.sys_homeassistant.core.logs() | ||||
|         await asyncio.shield( | ||||
|             self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE]) | ||||
|         ) | ||||
|  | ||||
|     @api_process | ||||
|     async def check(self, request: web.Request) -> None: | ||||
|   | ||||
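Restart and rebuild now accept a body validated by SCHEMA_RESTART, and both refuse to proceed while an offline database migration is running unless `force` is set. A hedged client sketch; the endpoint path follows the documented core API, while the exact error status for APIDBMigrationInProgress is an assumption:

```python
# Hypothetical client call for the new safe_mode/force restart options.
import asyncio
import os

import aiohttp


async def restart_core(safe_mode: bool = False, force: bool = False) -> None:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            "http://supervisor/core/restart",
            json={"safe_mode": safe_mode, "force": force},
        ) as resp:
            if resp.status == 400:
                # Likely an offline DB migration in progress and force=False
                # (exact status/payload assumed, not taken from the diff).
                print(await resp.text())
                return
            resp.raise_for_status()


asyncio.run(restart_core(safe_mode=True))
```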
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor host RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from contextlib import suppress | ||||
| import logging | ||||
| @@ -27,8 +28,15 @@ from ..const import ( | ||||
|     ATTR_TIMEZONE, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError, HostLogError | ||||
| from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER | ||||
| from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError | ||||
| from ..host.const import ( | ||||
|     PARAM_BOOT_ID, | ||||
|     PARAM_FOLLOW, | ||||
|     PARAM_SYSLOG_IDENTIFIER, | ||||
|     LogFormat, | ||||
|     LogFormatter, | ||||
| ) | ||||
| from ..utils.systemd_journal import journal_logs_reader | ||||
| from .const import ( | ||||
|     ATTR_AGENT_VERSION, | ||||
|     ATTR_APPARMOR_VERSION, | ||||
| @@ -38,26 +46,48 @@ from .const import ( | ||||
|     ATTR_BROADCAST_MDNS, | ||||
|     ATTR_DT_SYNCHRONIZED, | ||||
|     ATTR_DT_UTC, | ||||
|     ATTR_FORCE, | ||||
|     ATTR_IDENTIFIERS, | ||||
|     ATTR_LLMNR_HOSTNAME, | ||||
|     ATTR_STARTUP_TIME, | ||||
|     ATTR_USE_NTP, | ||||
|     ATTR_VIRTUALIZATION, | ||||
|     CONTENT_TYPE_TEXT, | ||||
|     CONTENT_TYPE_X_LOG, | ||||
| ) | ||||
| from .utils import api_process, api_validate | ||||
| from .utils import api_process, api_process_raw, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| IDENTIFIER = "identifier" | ||||
| BOOTID = "bootid" | ||||
| DEFAULT_RANGE = 100 | ||||
| DEFAULT_LINES = 100 | ||||
|  | ||||
| SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str}) | ||||
|  | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_SHUTDOWN = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_FORCE, default=False): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
| # pylint: enable=no-value-for-parameter | ||||
|  | ||||
|  | ||||
| class APIHost(CoreSysAttributes): | ||||
|     """Handle RESTful API for host functions.""" | ||||
|  | ||||
|     async def _check_ha_offline_migration(self, force: bool) -> None: | ||||
|         """Check if HA has an offline migration in progress and raise if not forced.""" | ||||
|         if ( | ||||
|             not force | ||||
|             and (state := await self.sys_homeassistant.api.get_api_state()) | ||||
|             and state.offline_db_migration | ||||
|         ): | ||||
|             raise APIDBMigrationInProgress( | ||||
|                 "Home Assistant offline database migration in progress, please wait until complete before shutting down host" | ||||
|             ) | ||||
|  | ||||
|     @api_process | ||||
|     async def info(self, request): | ||||
|         """Return host information.""" | ||||
| @@ -65,6 +95,7 @@ class APIHost(CoreSysAttributes): | ||||
|             ATTR_AGENT_VERSION: self.sys_dbus.agent.version, | ||||
|             ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version, | ||||
|             ATTR_CHASSIS: self.sys_host.info.chassis, | ||||
|             ATTR_VIRTUALIZATION: self.sys_host.info.virtualization, | ||||
|             ATTR_CPE: self.sys_host.info.cpe, | ||||
|             ATTR_DEPLOYMENT: self.sys_host.info.deployment, | ||||
|             ATTR_DISK_FREE: self.sys_host.info.free_space, | ||||
| @@ -98,14 +129,20 @@ class APIHost(CoreSysAttributes): | ||||
|             ) | ||||
|  | ||||
|     @api_process | ||||
|     def reboot(self, request): | ||||
|     async def reboot(self, request): | ||||
|         """Reboot host.""" | ||||
|         return asyncio.shield(self.sys_host.control.reboot()) | ||||
|         body = await api_validate(SCHEMA_SHUTDOWN, request) | ||||
|         await self._check_ha_offline_migration(force=body[ATTR_FORCE]) | ||||
|  | ||||
|         return await asyncio.shield(self.sys_host.control.reboot()) | ||||
|  | ||||
|     @api_process | ||||
|     def shutdown(self, request): | ||||
|     async def shutdown(self, request): | ||||
|         """Poweroff host.""" | ||||
|         return asyncio.shield(self.sys_host.control.shutdown()) | ||||
|         body = await api_validate(SCHEMA_SHUTDOWN, request) | ||||
|         await self._check_ha_offline_migration(force=body[ATTR_FORCE]) | ||||
|  | ||||
|         return await asyncio.shield(self.sys_host.control.shutdown()) | ||||
|  | ||||
|     @api_process | ||||
|     def reload(self, request): | ||||
| @@ -153,11 +190,11 @@ class APIHost(CoreSysAttributes): | ||||
|                 raise APIError() from err | ||||
|         return possible_offset | ||||
|  | ||||
|     @api_process | ||||
|     async def advanced_logs( | ||||
|     async def advanced_logs_handler( | ||||
|         self, request: web.Request, identifier: str | None = None, follow: bool = False | ||||
|     ) -> web.StreamResponse: | ||||
|         """Return systemd-journald logs.""" | ||||
|         log_formatter = LogFormatter.PLAIN | ||||
|         params = {} | ||||
|         if identifier: | ||||
|             params[PARAM_SYSLOG_IDENTIFIER] = identifier | ||||
| @@ -165,6 +202,8 @@ class APIHost(CoreSysAttributes): | ||||
|             params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER) | ||||
|         else: | ||||
|             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers | ||||
|             # host logs should always be verbose, no matter what Accept header is used | ||||
|             log_formatter = LogFormatter.VERBOSE | ||||
|  | ||||
|         if BOOTID in request.match_info: | ||||
|             params[PARAM_BOOT_ID] = await self._get_boot_id( | ||||
| @@ -175,28 +214,62 @@ class APIHost(CoreSysAttributes): | ||||
|  | ||||
|         if ACCEPT in request.headers and request.headers[ACCEPT] not in [ | ||||
|             CONTENT_TYPE_TEXT, | ||||
|             CONTENT_TYPE_X_LOG, | ||||
|             "*/*", | ||||
|         ]: | ||||
|             raise APIError( | ||||
|                 "Invalid content type requested. Only text/plain supported for now." | ||||
|                 "Invalid content type requested. Only text/plain and text/x-log " | ||||
|                 "supported for now." | ||||
|             ) | ||||
|  | ||||
|         if RANGE in request.headers: | ||||
|         if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG: | ||||
|             log_formatter = LogFormatter.VERBOSE | ||||
|  | ||||
|         if "lines" in request.query: | ||||
|             lines = request.query.get("lines", DEFAULT_LINES) | ||||
|             try: | ||||
|                 lines = int(lines) | ||||
|             except ValueError: | ||||
|                 # If the user passed a non-integer value, just use the default instead of raising an error. | ||||
|                 lines = DEFAULT_LINES | ||||
|             finally: | ||||
|                 # We can't use the entries= Range header syntax to refer to the last 1 line, | ||||
|                 # and passing 1 to the calculation below would return the 1st line of the logs | ||||
|                 # instead. Since this is really an edge case that doesn't matter much, we'll just | ||||
|                 # return 2 lines at minimum. | ||||
|                 lines = max(2, lines) | ||||
|             # entries=cursor[[:num_skip]:num_entries] | ||||
|             range_header = f"entries=:-{lines-1}:{'' if follow else lines}" | ||||
|         elif RANGE in request.headers: | ||||
|             range_header = request.headers.get(RANGE) | ||||
|         else: | ||||
|             range_header = f"entries=:-{DEFAULT_RANGE}:" | ||||
|             range_header = ( | ||||
|                 f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}" | ||||
|             ) | ||||
|  | ||||
|         async with self.sys_host.logs.journald_logs( | ||||
|             params=params, range_header=range_header | ||||
|             params=params, range_header=range_header, accept=LogFormat.JOURNAL | ||||
|         ) as resp: | ||||
|             try: | ||||
|                 response = web.StreamResponse() | ||||
|                 response.content_type = CONTENT_TYPE_TEXT | ||||
|                 await response.prepare(request) | ||||
|                 async for data in resp.content: | ||||
|                     await response.write(data) | ||||
|                 headers_returned = False | ||||
|                 async for cursor, line in journal_logs_reader(resp, log_formatter): | ||||
|                     if not headers_returned: | ||||
|                         if cursor: | ||||
|                             response.headers["X-First-Cursor"] = cursor | ||||
|                         await response.prepare(request) | ||||
|                         headers_returned = True | ||||
|                     await response.write(line.encode("utf-8") + b"\n") | ||||
|             except ConnectionResetError as ex: | ||||
|                 raise APIError( | ||||
|                     "Connection reset when trying to fetch data from systemd-journald." | ||||
|                 ) from ex | ||||
|             return response | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT) | ||||
|     async def advanced_logs( | ||||
|         self, request: web.Request, identifier: str | None = None, follow: bool = False | ||||
|     ) -> web.StreamResponse: | ||||
|         """Return systemd-journald logs. Wrapped as standard API handler.""" | ||||
|         return await self.advanced_logs_handler(request, identifier, follow) | ||||
|   | ||||
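The `lines - 1` in the Range header is easy to misread: journald's `entries=cursor[[:num_skip]:num_entries]` syntax counts the anchor entry itself, and when following, `num_entries` is left empty so the stream keeps going after the initial batch. A worked example of the computation used above:

```python
# Worked example of the journald Range header built in advanced_logs_handler.
def journal_range(lines: int, follow: bool) -> str:
    lines = max(2, lines)  # same minimum the handler applies
    # entries=cursor[[:num_skip]:num_entries]
    return f"entries=:-{lines - 1}:{'' if follow else lines}"


assert journal_range(100, follow=False) == "entries=:-99:100"
assert journal_range(50, follow=True) == "entries=:-49:"
```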
| @@ -1,4 +1,5 @@ | ||||
| """Supervisor Add-on ingress service.""" | ||||
|  | ||||
| import asyncio | ||||
| from ipaddress import ip_address | ||||
| import logging | ||||
| @@ -21,11 +22,18 @@ from ..const import ( | ||||
|     ATTR_ICON, | ||||
|     ATTR_PANELS, | ||||
|     ATTR_SESSION, | ||||
|     ATTR_SESSION_DATA_USER_ID, | ||||
|     ATTR_TITLE, | ||||
|     HEADER_REMOTE_USER_DISPLAY_NAME, | ||||
|     HEADER_REMOTE_USER_ID, | ||||
|     HEADER_REMOTE_USER_NAME, | ||||
|     HEADER_TOKEN, | ||||
|     HEADER_TOKEN_OLD, | ||||
|     IngressSessionData, | ||||
|     IngressSessionDataUser, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import HomeAssistantAPIError | ||||
| from .const import COOKIE_INGRESS | ||||
| from .utils import api_process, api_validate, require_home_assistant | ||||
|  | ||||
| @@ -33,10 +41,46 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str}) | ||||
|  | ||||
| """Expected optional payload of create session request""" | ||||
| SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_SESSION_DATA_USER_ID): str, | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| # from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1 | ||||
| def must_be_empty_body(method: str, code: int) -> bool: | ||||
|     """Check if a request must return an empty body.""" | ||||
|     return ( | ||||
|         status_code_must_be_empty_body(code) | ||||
|         or method_must_be_empty_body(method) | ||||
|         or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def method_must_be_empty_body(method: str) -> bool: | ||||
|     """Check if a method must return an empty body.""" | ||||
|     # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 | ||||
|     # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 | ||||
|     return method.upper() == hdrs.METH_HEAD | ||||
|  | ||||
|  | ||||
| def status_code_must_be_empty_body(code: int) -> bool: | ||||
|     """Check if a status code must return an empty body.""" | ||||
|     # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 | ||||
|     return code in {204, 304} or 100 <= code < 200 | ||||
|  | ||||
|  | ||||
| class APIIngress(CoreSysAttributes): | ||||
|     """Ingress view to handle add-on webui routing.""" | ||||
|  | ||||
|     _list_of_users: list[IngressSessionDataUser] | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize APIIngress.""" | ||||
|         self._list_of_users = [] | ||||
|  | ||||
|     def _extract_addon(self, request: web.Request) -> Addon: | ||||
|         """Return addon, throw an exception it it doesn't exist.""" | ||||
|         token = request.match_info.get("token") | ||||
| @@ -71,7 +115,19 @@ class APIIngress(CoreSysAttributes): | ||||
|     @require_home_assistant | ||||
|     async def create_session(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Create a new session.""" | ||||
|         session = self.sys_ingress.create_session() | ||||
|         schema_ingress_config_session_data = await api_validate( | ||||
|             SCHEMA_INGRESS_CREATE_SESSION_DATA, request | ||||
|         ) | ||||
|         data: IngressSessionData | None = None | ||||
|  | ||||
|         if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data: | ||||
|             user = await self._find_user_by_id( | ||||
|                 schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID] | ||||
|             ) | ||||
|             if user: | ||||
|                 data = IngressSessionData(user) | ||||
|  | ||||
|         session = self.sys_ingress.create_session(data) | ||||
|         return {ATTR_SESSION: session} | ||||
|  | ||||
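|     # Sketch of calling this from Home Assistant (the in-network URL and the | ||||
|     # payload key are assumptions; the route is gated by @require_home_assistant): | ||||
|     # | ||||
|     #   async with session.post( | ||||
|     #       "http://supervisor/ingress/session", | ||||
|     #       headers={"Authorization": f"Bearer {token}"}, | ||||
|     #       json={"user_id": user_id}, | ||||
|     #   ) as resp: | ||||
|     #       session_token = (await resp.json())["data"]["session"] | ||||
|  | ||||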
|     @api_process | ||||
| @@ -99,13 +155,14 @@ class APIIngress(CoreSysAttributes): | ||||
|         # Process requests | ||||
|         addon = self._extract_addon(request) | ||||
|         path = request.match_info.get("path") | ||||
|         session_data = self.sys_ingress.get_session_data(session) | ||||
|         try: | ||||
|             # Websocket | ||||
|             if _is_websocket(request): | ||||
|                 return await self._handle_websocket(request, addon, path) | ||||
|                 return await self._handle_websocket(request, addon, path, session_data) | ||||
|  | ||||
|             # Request | ||||
|             return await self._handle_request(request, addon, path) | ||||
|             return await self._handle_request(request, addon, path, session_data) | ||||
|  | ||||
|         except aiohttp.ClientError as err: | ||||
|             _LOGGER.error("Ingress error: %s", err) | ||||
| @@ -113,7 +170,11 @@ class APIIngress(CoreSysAttributes): | ||||
|         raise HTTPBadGateway() | ||||
|  | ||||
|     async def _handle_websocket( | ||||
|         self, request: web.Request, addon: Addon, path: str | ||||
|         self, | ||||
|         request: web.Request, | ||||
|         addon: Addon, | ||||
|         path: str, | ||||
|         session_data: IngressSessionData | None, | ||||
|     ) -> web.WebSocketResponse: | ||||
|         """Ingress route for websocket.""" | ||||
|         if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers: | ||||
| @@ -131,7 +192,7 @@ class APIIngress(CoreSysAttributes): | ||||
|  | ||||
|         # Preparing | ||||
|         url = self._create_url(addon, path) | ||||
|         source_header = _init_header(request, addon) | ||||
|         source_header = _init_header(request, addon, session_data) | ||||
|  | ||||
|         # Support GET query | ||||
|         if request.query_string: | ||||
| @@ -157,11 +218,15 @@ class APIIngress(CoreSysAttributes): | ||||
|         return ws_server | ||||
|  | ||||
|     async def _handle_request( | ||||
|         self, request: web.Request, addon: Addon, path: str | ||||
|         self, | ||||
|         request: web.Request, | ||||
|         addon: Addon, | ||||
|         path: str, | ||||
|         session_data: IngressSessionData | None, | ||||
|     ) -> web.Response | web.StreamResponse: | ||||
|         """Ingress route for request.""" | ||||
|         url = self._create_url(addon, path) | ||||
|         source_header = _init_header(request, addon) | ||||
|         source_header = _init_header(request, addon, session_data) | ||||
|  | ||||
|         # Passing the raw stream breaks requests for some webservers | ||||
|         # since we just need it for POST requests really, for all other methods | ||||
| @@ -184,10 +249,18 @@ class APIIngress(CoreSysAttributes): | ||||
|             skip_auto_headers={hdrs.CONTENT_TYPE}, | ||||
|         ) as result: | ||||
|             headers = _response_header(result) | ||||
|  | ||||
|             # Avoid parsing content_type in simple cases for better performance | ||||
|             if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE): | ||||
|                 content_type = (maybe_content_type.partition(";"))[0].strip() | ||||
|             else: | ||||
|                 content_type = result.content_type | ||||
|             # Simple request | ||||
|             if ( | ||||
|                 hdrs.CONTENT_LENGTH in result.headers | ||||
|                 # empty body responses should not be streamed, | ||||
|                 # otherwise aiohttp < 3.9.0 may generate | ||||
|                 # an invalid "0\r\n\r\n" chunk instead of an empty response. | ||||
|                 must_be_empty_body(request.method, result.status) | ||||
|                 or hdrs.CONTENT_LENGTH in result.headers | ||||
|                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000 | ||||
|             ): | ||||
|                 # Return Response | ||||
| @@ -195,13 +268,13 @@ class APIIngress(CoreSysAttributes): | ||||
|                 return web.Response( | ||||
|                     headers=headers, | ||||
|                     status=result.status, | ||||
|                     content_type=result.content_type, | ||||
|                     content_type=content_type, | ||||
|                     body=body, | ||||
|                 ) | ||||
|  | ||||
|             # Stream response | ||||
|             response = web.StreamResponse(status=result.status, headers=headers) | ||||
|             response.content_type = result.content_type | ||||
|             response.content_type = content_type | ||||
|  | ||||
|             try: | ||||
|                 await response.prepare(request) | ||||
| @@ -217,11 +290,35 @@ class APIIngress(CoreSysAttributes): | ||||
|  | ||||
|             return response | ||||
|  | ||||
|     async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None: | ||||
|         """Find user object by the user's ID.""" | ||||
|         try: | ||||
|             list_of_users = await self.sys_homeassistant.get_users() | ||||
|         except (HomeAssistantAPIError, TypeError) as err: | ||||
|             _LOGGER.error( | ||||
|                 "%s error occurred while requesting list of users: %s", type(err), err | ||||
|             ) | ||||
|             return None | ||||
|  | ||||
| def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]: | ||||
|         if list_of_users is not None: | ||||
|             self._list_of_users = list_of_users | ||||
|  | ||||
|         return next((user for user in self._list_of_users if user.id == user_id), None) | ||||
|  | ||||
|  | ||||
| def _init_header( | ||||
|     request: web.Request, addon: Addon, session_data: IngressSessionData | None | ||||
| ) -> CIMultiDict | dict[str, str]: | ||||
|     """Create initial header.""" | ||||
|     headers = {} | ||||
|  | ||||
|     if session_data is not None: | ||||
|         headers[HEADER_REMOTE_USER_ID] = session_data.user.id | ||||
|         if session_data.user.username is not None: | ||||
|             headers[HEADER_REMOTE_USER_NAME] = session_data.user.username | ||||
|         if session_data.user.display_name is not None: | ||||
|             headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name | ||||
|  | ||||
|     # filter flags | ||||
|     for name, value in request.headers.items(): | ||||
|         if name in ( | ||||
| @@ -234,6 +331,9 @@ def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, st | ||||
|             hdrs.SEC_WEBSOCKET_KEY, | ||||
|             istr(HEADER_TOKEN), | ||||
|             istr(HEADER_TOKEN_OLD), | ||||
|             istr(HEADER_REMOTE_USER_ID), | ||||
|             istr(HEADER_REMOTE_USER_NAME), | ||||
|             istr(HEADER_REMOTE_USER_DISPLAY_NAME), | ||||
|         ): | ||||
|             continue | ||||
|         headers[name] = value | ||||
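|  | ||||
| # What an add-on behind ingress can now read (a sketch; assumes the header | ||||
| # constants above resolve to the X-Remote-User-* names used below): | ||||
| from aiohttp import web | ||||
|  | ||||
|  | ||||
| async def whoami(request: web.Request) -> web.Response: | ||||
|     """Echo the user identity forwarded by the Supervisor ingress.""" | ||||
|     return web.json_response( | ||||
|         { | ||||
|             "id": request.headers.get("X-Remote-User-Id"), | ||||
|             "username": request.headers.get("X-Remote-User-Name"), | ||||
|             "display_name": request.headers.get("X-Remote-User-Display-Name"), | ||||
|         } | ||||
|     ) | ||||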
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Jobs RESTful API.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| @@ -6,7 +7,10 @@ from aiohttp import web | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError | ||||
| from ..jobs import SupervisorJob | ||||
| from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition | ||||
| from .const import ATTR_JOBS | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
| @@ -19,11 +23,47 @@ SCHEMA_OPTIONS = vol.Schema( | ||||
| class APIJobs(CoreSysAttributes): | ||||
|     """Handle RESTful API for OS functions.""" | ||||
|  | ||||
|     def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]: | ||||
|         """Return current job tree.""" | ||||
|         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {} | ||||
|         for job in self.sys_jobs.jobs: | ||||
|             if job.internal: | ||||
|                 continue | ||||
|  | ||||
|             if job.parent_id not in jobs_by_parent: | ||||
|                 jobs_by_parent[job.parent_id] = [job] | ||||
|             else: | ||||
|                 jobs_by_parent[job.parent_id].append(job) | ||||
|  | ||||
|         job_list: list[dict[str, Any]] = [] | ||||
|         queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = ( | ||||
|             [(job_list, start)] | ||||
|             if start | ||||
|             else [(job_list, job) for job in jobs_by_parent.get(None, [])] | ||||
|         ) | ||||
|  | ||||
|         while queue: | ||||
|             (current_list, current_job) = queue.pop(0) | ||||
|             child_jobs: list[dict[str, Any]] = [] | ||||
|  | ||||
|             # We remove parent_id and instead use that info to represent jobs as a tree | ||||
|             job_dict = current_job.as_dict() | {"child_jobs": child_jobs} | ||||
|             job_dict.pop("parent_id") | ||||
|             current_list.append(job_dict) | ||||
|  | ||||
|             if current_job.uuid in jobs_by_parent: | ||||
|                 queue.extend( | ||||
|                     [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)] | ||||
|                 ) | ||||
|  | ||||
|         return job_list | ||||
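|  | ||||
|     # Rough shape of the resulting tree (values hypothetical, fields abridged): | ||||
|     # parent_id is dropped and the relationship moves into child_jobs. | ||||
|     # | ||||
|     #   [ | ||||
|     #       { | ||||
|     #           "name": "backup_manager_full_backup", | ||||
|     #           "uuid": "9f2c...", | ||||
|     #           "done": False, | ||||
|     #           "child_jobs": [{"name": "addon_backup", "uuid": "4b1e...", ...}], | ||||
|     #       }, | ||||
|     #   ] | ||||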
|  | ||||
|     @api_process | ||||
|     async def info(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Return JobManager information.""" | ||||
|         return { | ||||
|             ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions, | ||||
|             ATTR_JOBS: self._list_jobs(), | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
| @@ -42,3 +82,19 @@ class APIJobs(CoreSysAttributes): | ||||
|     async def reset(self, request: web.Request) -> None: | ||||
|         """Reset options for JobManager.""" | ||||
|         self.sys_jobs.reset_data() | ||||
|  | ||||
|     @api_process | ||||
|     async def job_info(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Get details of a job by ID.""" | ||||
|         job = self.sys_jobs.get_job(request.match_info.get("uuid")) | ||||
|         return self._list_jobs(job)[0] | ||||
|  | ||||
|     @api_process | ||||
|     async def remove_job(self, request: web.Request) -> None: | ||||
|         """Remove a completed job.""" | ||||
|         job = self.sys_jobs.get_job(request.match_info.get("uuid")) | ||||
|  | ||||
|         if not job.done: | ||||
|             raise APIError(f"Job {job.uuid} is not done!") | ||||
|  | ||||
|         self.sys_jobs.remove_job(job) | ||||
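|  | ||||
| # Client-side sketch of the two new handlers (paths assumed from the handler | ||||
| # names; the Supervisor API wraps results in a "data" envelope): | ||||
| import aiohttp | ||||
|  | ||||
|  | ||||
| async def prune_job(token: str, uuid: str) -> None: | ||||
|     """Fetch a job and delete it once it has finished.""" | ||||
|     async with aiohttp.ClientSession( | ||||
|         headers={"Authorization": f"Bearer {token}"} | ||||
|     ) as http: | ||||
|         async with http.get(f"http://supervisor/jobs/{uuid}") as resp: | ||||
|             job = (await resp.json())["data"] | ||||
|         if job["done"]: | ||||
|             await http.delete(f"http://supervisor/jobs/{uuid}") | ||||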
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Handle security part of this API.""" | ||||
|  | ||||
| import logging | ||||
| import re | ||||
| from typing import Final | ||||
| @@ -8,6 +9,8 @@ from aiohttp.web import Request, RequestHandler, Response, middleware | ||||
| from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized | ||||
| from awesomeversion import AwesomeVersion | ||||
|  | ||||
| from supervisor.homeassistant.const import LANDINGPAGE | ||||
|  | ||||
| from ...addons.const import RE_SLUG | ||||
| from ...const import ( | ||||
|     REQUEST_FROM, | ||||
| @@ -19,6 +22,7 @@ from ...const import ( | ||||
|     CoreState, | ||||
| ) | ||||
| from ...coresys import CoreSys, CoreSysAttributes | ||||
| from ...utils import version_is_new_enough | ||||
| from ..utils import api_return_error, excract_supervisor_token | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
| @@ -76,6 +80,13 @@ ADDONS_API_BYPASS: Final = re.compile( | ||||
|     r")$" | ||||
| ) | ||||
|  | ||||
| # Home Assistant only | ||||
| CORE_ONLY_PATHS: Final = re.compile( | ||||
|     r"^(?:" | ||||
|     r"/addons/" + RE_SLUG + "/sys_options" | ||||
|     r")$" | ||||
| ) | ||||
|  | ||||
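| # Illustration (assuming RE_SLUG matches ordinary slugs like "core_ssh"): | ||||
| # CORE_ONLY_PATHS.match("/addons/core_ssh/sys_options") -> match, Core only | ||||
| # CORE_ONLY_PATHS.match("/addons/core_ssh/options")     -> None, normal rules | ||||
|  | ||||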
| # Policy role add-on API access | ||||
| ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = { | ||||
|     ROLE_DEFAULT: re.compile( | ||||
| @@ -102,6 +113,8 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = { | ||||
|         r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?" | ||||
|         r"|/audio/.+" | ||||
|         r"|/auth/cache" | ||||
|         r"|/available_updates" | ||||
|         r"|/backups.*" | ||||
|         r"|/cli/.+" | ||||
|         r"|/core/.+" | ||||
|         r"|/dns/.+" | ||||
| @@ -111,16 +124,17 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = { | ||||
|         r"|/hassos/.+" | ||||
|         r"|/homeassistant/.+" | ||||
|         r"|/host/.+" | ||||
|         r"|/mounts.*" | ||||
|         r"|/multicast/.+" | ||||
|         r"|/network/.+" | ||||
|         r"|/observer/.+" | ||||
|         r"|/os/.+" | ||||
|         r"|/os/(?!datadisk/wipe).+" | ||||
|         r"|/refresh_updates" | ||||
|         r"|/resolution/.+" | ||||
|         r"|/backups.*" | ||||
|         r"|/security/.+" | ||||
|         r"|/snapshots.*" | ||||
|         r"|/store.*" | ||||
|         r"|/supervisor/.+" | ||||
|         r"|/security/.+" | ||||
|         r")$" | ||||
|     ), | ||||
|     ROLE_ADMIN: re.compile( | ||||
| @@ -195,7 +209,7 @@ class SecurityMiddleware(CoreSysAttributes): | ||||
|             CoreState.FREEZE, | ||||
|         ): | ||||
|             return api_return_error( | ||||
|                 message=f"System is not ready with state: {self.sys_core.state.value}" | ||||
|                 message=f"System is not ready with state: {self.sys_core.state}" | ||||
|             ) | ||||
|  | ||||
|         return await handler(request) | ||||
| @@ -228,6 +242,9 @@ class SecurityMiddleware(CoreSysAttributes): | ||||
|         if supervisor_token == self.sys_homeassistant.supervisor_token: | ||||
|             _LOGGER.debug("%s access from Home Assistant", request.path) | ||||
|             request_from = self.sys_homeassistant | ||||
|         elif CORE_ONLY_PATHS.match(request.path): | ||||
|             _LOGGER.warning("Attempted access to %s from client besides Home Assistant") | ||||
|             raise HTTPForbidden() | ||||
|  | ||||
|         # Host | ||||
|         if supervisor_token == self.sys_plugins.cli.supervisor_token: | ||||
| @@ -275,7 +292,8 @@ class SecurityMiddleware(CoreSysAttributes): | ||||
|         """Validate user from Core API proxy.""" | ||||
|         if ( | ||||
|             request[REQUEST_FROM] != self.sys_homeassistant | ||||
|             or self.sys_homeassistant.version >= _CORE_VERSION | ||||
|             or self.sys_homeassistant.version == LANDINGPAGE | ||||
|             or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION) | ||||
|         ): | ||||
|             return await handler(request) | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Multicast RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| @@ -23,8 +24,7 @@ from ..const import ( | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError | ||||
| from ..validate import version_tag | ||||
| from .const import CONTENT_TYPE_BINARY | ||||
| from .utils import api_process, api_process_raw, api_validate | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -69,11 +69,6 @@ class APIMulticast(CoreSysAttributes): | ||||
|             raise APIError(f"Version {version} is already in use") | ||||
|         await asyncio.shield(self.sys_plugins.multicast.update(version)) | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_BINARY) | ||||
|     def logs(self, request: web.Request) -> Awaitable[bytes]: | ||||
|         """Return Multicast Docker logs.""" | ||||
|         return self.sys_plugins.multicast.logs() | ||||
|  | ||||
|     @api_process | ||||
|     def restart(self, request: web.Request) -> Awaitable[None]: | ||||
|         """Restart Multicast plugin.""" | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| """REST API for network.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from dataclasses import replace | ||||
| from ipaddress import ip_address, ip_interface | ||||
| from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface | ||||
| from typing import Any | ||||
|  | ||||
| from aiohttp import web | ||||
| @@ -48,18 +48,28 @@ from ..host.configuration import ( | ||||
|     Interface, | ||||
|     InterfaceMethod, | ||||
|     IpConfig, | ||||
|     IpSetting, | ||||
|     VlanConfig, | ||||
|     WifiConfig, | ||||
| ) | ||||
| from ..host.const import AuthMethod, InterfaceType, WifiMode | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _SCHEMA_IP_CONFIG = vol.Schema( | ||||
| _SCHEMA_IPV4_CONFIG = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)], | ||||
|         vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)], | ||||
|         vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod), | ||||
|         vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address), | ||||
|         vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)], | ||||
|         vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address), | ||||
|         vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)], | ||||
|     } | ||||
| ) | ||||
|  | ||||
| _SCHEMA_IPV6_CONFIG = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)], | ||||
|         vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod), | ||||
|         vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address), | ||||
|         vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)], | ||||
|     } | ||||
| ) | ||||
|  | ||||
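| # A sketch of what the split buys (key names assumed to be the literal | ||||
| # "address"/"gateway"): each address family now rejects values of the other. | ||||
| import voluptuous as vol | ||||
|  | ||||
| _SCHEMA_IPV4_CONFIG({"address": ["192.168.1.10/24"], "gateway": "192.168.1.1"}) | ||||
| _SCHEMA_IPV6_CONFIG({"address": ["2001:db8::10/64"], "gateway": "2001:db8::1"}) | ||||
| try: | ||||
|     _SCHEMA_IPV4_CONFIG({"gateway": "2001:db8::1"})  # IPv6 value, IPv4 schema | ||||
| except vol.Invalid: | ||||
|     pass  # rejected, as intended | ||||
|  | ||||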
| @@ -76,18 +86,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema( | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_UPDATE = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG, | ||||
|         vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG, | ||||
|         vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG, | ||||
|         vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG, | ||||
|         vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG, | ||||
|         vol.Optional(ATTR_ENABLED): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| def ipconfig_struct(config: IpConfig) -> dict[str, Any]: | ||||
| def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]: | ||||
|     """Return a dict with information about ip configuration.""" | ||||
|     return { | ||||
|         ATTR_METHOD: config.method, | ||||
|         ATTR_METHOD: setting.method, | ||||
|         ATTR_ADDRESS: [address.with_prefixlen for address in config.address], | ||||
|         ATTR_NAMESERVERS: [str(address) for address in config.nameservers], | ||||
|         ATTR_GATEWAY: str(config.gateway) if config.gateway else None, | ||||
| @@ -122,8 +132,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]: | ||||
|         ATTR_CONNECTED: interface.connected, | ||||
|         ATTR_PRIMARY: interface.primary, | ||||
|         ATTR_MAC: interface.mac, | ||||
|         ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None, | ||||
|         ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None, | ||||
|         ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting), | ||||
|         ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting), | ||||
|         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None, | ||||
|         ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None, | ||||
|     } | ||||
| @@ -197,24 +207,26 @@ class APINetwork(CoreSysAttributes): | ||||
|         # Apply config | ||||
|         for key, config in body.items(): | ||||
|             if key == ATTR_IPV4: | ||||
|                 interface.ipv4 = replace( | ||||
|                     interface.ipv4 | ||||
|                     or IpConfig(InterfaceMethod.STATIC, [], None, [], None), | ||||
|                     **config, | ||||
|                 interface.ipv4setting = IpSetting( | ||||
|                     config.get(ATTR_METHOD, InterfaceMethod.STATIC), | ||||
|                     config.get(ATTR_ADDRESS, []), | ||||
|                     config.get(ATTR_GATEWAY), | ||||
|                     config.get(ATTR_NAMESERVERS, []), | ||||
|                 ) | ||||
|             elif key == ATTR_IPV6: | ||||
|                 interface.ipv6 = replace( | ||||
|                     interface.ipv6 | ||||
|                     or IpConfig(InterfaceMethod.STATIC, [], None, [], None), | ||||
|                     **config, | ||||
|                 interface.ipv6setting = IpSetting( | ||||
|                     config.get(ATTR_METHOD, InterfaceMethod.STATIC), | ||||
|                     config.get(ATTR_ADDRESS, []), | ||||
|                     config.get(ATTR_GATEWAY), | ||||
|                     config.get(ATTR_NAMESERVERS, []), | ||||
|                 ) | ||||
|             elif key == ATTR_WIFI: | ||||
|                 interface.wifi = replace( | ||||
|                     interface.wifi | ||||
|                     or WifiConfig( | ||||
|                         WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None | ||||
|                     ), | ||||
|                     **config, | ||||
|                 interface.wifi = WifiConfig( | ||||
|                     config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE), | ||||
|                     config.get(ATTR_SSID, ""), | ||||
|                     config.get(ATTR_AUTH, AuthMethod.OPEN), | ||||
|                     config.get(ATTR_PSK, None), | ||||
|                     None, | ||||
|                 ) | ||||
|             elif key == ATTR_ENABLED: | ||||
|                 interface.enabled = config | ||||
| @@ -256,35 +268,36 @@ class APINetwork(CoreSysAttributes): | ||||
|  | ||||
|         vlan_config = VlanConfig(vlan, interface.name) | ||||
|  | ||||
|         ipv4_config = None | ||||
|         ipv4_setting = None | ||||
|         if ATTR_IPV4 in body: | ||||
|             ipv4_config = IpConfig( | ||||
|             ipv4_setting = IpSetting( | ||||
|                 body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO), | ||||
|                 body[ATTR_IPV4].get(ATTR_ADDRESS, []), | ||||
|                 body[ATTR_IPV4].get(ATTR_GATEWAY, None), | ||||
|                 body[ATTR_IPV4].get(ATTR_NAMESERVERS, []), | ||||
|                 None, | ||||
|             ) | ||||
|  | ||||
|         ipv6_config = None | ||||
|         ipv6_setting = None | ||||
|         if ATTR_IPV6 in body: | ||||
|             ipv6_config = IpConfig( | ||||
|             ipv6_setting = IpSetting( | ||||
|                 body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO), | ||||
|                 body[ATTR_IPV6].get(ATTR_ADDRESS, []), | ||||
|                 body[ATTR_IPV6].get(ATTR_GATEWAY, None), | ||||
|                 body[ATTR_IPV6].get(ATTR_NAMESERVERS, []), | ||||
|                 None, | ||||
|             ) | ||||
|  | ||||
|         vlan_interface = Interface( | ||||
|             "", | ||||
|             "", | ||||
|             "", | ||||
|             True, | ||||
|             True, | ||||
|             False, | ||||
|             InterfaceType.VLAN, | ||||
|             ipv4_config, | ||||
|             ipv6_config, | ||||
|             None, | ||||
|             ipv4_setting, | ||||
|             None, | ||||
|             ipv6_setting, | ||||
|             None, | ||||
|             vlan_config, | ||||
|         ) | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Observer RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| import logging | ||||
| from typing import Any | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor HassOS RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| @@ -8,13 +9,18 @@ from aiohttp import web | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ..const import ( | ||||
|     ATTR_ACTIVITY_LED, | ||||
|     ATTR_BOARD, | ||||
|     ATTR_BOOT, | ||||
|     ATTR_DEVICES, | ||||
|     ATTR_DISK_LED, | ||||
|     ATTR_HEARTBEAT_LED, | ||||
|     ATTR_ID, | ||||
|     ATTR_NAME, | ||||
|     ATTR_POWER_LED, | ||||
|     ATTR_SERIAL, | ||||
|     ATTR_SIZE, | ||||
|     ATTR_STATE, | ||||
|     ATTR_UPDATE_AVAILABLE, | ||||
|     ATTR_VERSION, | ||||
|     ATTR_VERSION_LATEST, | ||||
| @@ -24,24 +30,27 @@ from ..exceptions import BoardInvalidError | ||||
| from ..resolution.const import ContextType, IssueType, SuggestionType | ||||
| from ..validate import version_tag | ||||
| from .const import ( | ||||
|     ATTR_BOOT_SLOT, | ||||
|     ATTR_BOOT_SLOTS, | ||||
|     ATTR_DATA_DISK, | ||||
|     ATTR_DEV_PATH, | ||||
|     ATTR_DEVICE, | ||||
|     ATTR_DISK_LED, | ||||
|     ATTR_DISKS, | ||||
|     ATTR_HEARTBEAT_LED, | ||||
|     ATTR_MODEL, | ||||
|     ATTR_POWER_LED, | ||||
|     ATTR_STATUS, | ||||
|     ATTR_SYSTEM_HEALTH_LED, | ||||
|     ATTR_VENDOR, | ||||
|     BootSlot, | ||||
| ) | ||||
| from .utils import api_process, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag}) | ||||
| SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)}) | ||||
| SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str}) | ||||
|  | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_YELLOW_OPTIONS = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_DISK_LED): vol.Boolean(), | ||||
| @@ -49,6 +58,14 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema( | ||||
|         vol.Optional(ATTR_POWER_LED): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
| SCHEMA_GREEN_OPTIONS = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(), | ||||
|         vol.Optional(ATTR_POWER_LED): vol.Boolean(), | ||||
|         vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(), | ||||
|     } | ||||
| ) | ||||
| # pylint: enable=no-value-for-parameter | ||||
|  | ||||
|  | ||||
| class APIOS(CoreSysAttributes): | ||||
| @@ -64,6 +81,15 @@ class APIOS(CoreSysAttributes): | ||||
|             ATTR_BOARD: self.sys_os.board, | ||||
|             ATTR_BOOT: self.sys_dbus.rauc.boot_slot, | ||||
|             ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id, | ||||
|             ATTR_BOOT_SLOTS: { | ||||
|                 slot.bootname: { | ||||
|                     ATTR_STATE: slot.state, | ||||
|                     ATTR_STATUS: slot.boot_status, | ||||
|                     ATTR_VERSION: slot.bundle_version, | ||||
|                 } | ||||
|                 for slot in self.sys_os.slots | ||||
|                 if slot.bootname | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
| @@ -86,6 +112,17 @@ class APIOS(CoreSysAttributes): | ||||
|  | ||||
|         await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE])) | ||||
|  | ||||
|     @api_process | ||||
|     def wipe_data(self, request: web.Request) -> Awaitable[None]: | ||||
|         """Trigger data disk wipe on Host.""" | ||||
|         return asyncio.shield(self.sys_os.datadisk.wipe_disk()) | ||||
|  | ||||
|     @api_process | ||||
|     async def set_boot_slot(self, request: web.Request) -> None: | ||||
|         """Change the active boot slot and reboot into it.""" | ||||
|         body = await api_validate(SCHEMA_SET_BOOT_SLOT, request) | ||||
|         await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT])) | ||||
|  | ||||
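|     # Sketch of driving this over REST (path assumed from the handler name; | ||||
|     # BootSlot values assumed to be "A" and "B"): | ||||
|     # | ||||
|     #   await http.post("http://supervisor/os/boot-slot", json={"boot_slot": "B"}) | ||||
|  | ||||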
|     @api_process | ||||
|     async def list_data(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Return possible data targets.""" | ||||
| @@ -105,6 +142,35 @@ class APIOS(CoreSysAttributes): | ||||
|             ], | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
|     async def boards_green_info(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Get green board settings.""" | ||||
|         return { | ||||
|             ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led, | ||||
|             ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led, | ||||
|             ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led, | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
|     async def boards_green_options(self, request: web.Request) -> None: | ||||
|         """Update green board settings.""" | ||||
|         body = await api_validate(SCHEMA_GREEN_OPTIONS, request) | ||||
|  | ||||
|         if ATTR_ACTIVITY_LED in body: | ||||
|             await self.sys_dbus.agent.board.green.set_activity_led( | ||||
|                 body[ATTR_ACTIVITY_LED] | ||||
|             ) | ||||
|  | ||||
|         if ATTR_POWER_LED in body: | ||||
|             await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED]) | ||||
|  | ||||
|         if ATTR_SYSTEM_HEALTH_LED in body: | ||||
|             await self.sys_dbus.agent.board.green.set_user_led( | ||||
|                 body[ATTR_SYSTEM_HEALTH_LED] | ||||
|             ) | ||||
|  | ||||
|         self.sys_dbus.agent.board.green.save_data() | ||||
|  | ||||
|     @api_process | ||||
|     async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Get yellow board settings.""" | ||||
| @@ -120,14 +186,17 @@ class APIOS(CoreSysAttributes): | ||||
|         body = await api_validate(SCHEMA_YELLOW_OPTIONS, request) | ||||
|  | ||||
|         if ATTR_DISK_LED in body: | ||||
|             self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED] | ||||
|             await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED]) | ||||
|  | ||||
|         if ATTR_HEARTBEAT_LED in body: | ||||
|             self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED] | ||||
|             await self.sys_dbus.agent.board.yellow.set_heartbeat_led( | ||||
|                 body[ATTR_HEARTBEAT_LED] | ||||
|             ) | ||||
|  | ||||
|         if ATTR_POWER_LED in body: | ||||
|             self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED] | ||||
|             await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED]) | ||||
|  | ||||
|         self.sys_dbus.agent.board.yellow.save_data() | ||||
|         self.sys_resolution.create_issue( | ||||
|             IssueType.REBOOT_REQUIRED, | ||||
|             ContextType.SYSTEM, | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Utils for Home Assistant Proxy.""" | ||||
|  | ||||
| import asyncio | ||||
| from contextlib import asynccontextmanager | ||||
| import logging | ||||
| @@ -6,11 +7,15 @@ import logging | ||||
| import aiohttp | ||||
| from aiohttp import web | ||||
| from aiohttp.client_exceptions import ClientConnectorError | ||||
| from aiohttp.client_ws import ClientWebSocketResponse | ||||
| from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE | ||||
| from aiohttp.http import WSMessage | ||||
| from aiohttp.http_websocket import WSMsgType | ||||
| from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized | ||||
|  | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError | ||||
| from ..utils.json import json_dumps | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -74,7 +79,7 @@ class APIProxy(CoreSysAttributes): | ||||
|             _LOGGER.error("Error on API for request %s", path) | ||||
|         except aiohttp.ClientError as err: | ||||
|             _LOGGER.error("Client error on API %s request %s", path, err) | ||||
|         except asyncio.TimeoutError: | ||||
|         except TimeoutError: | ||||
|             _LOGGER.error("Client timeout error on API request %s", path) | ||||
|  | ||||
|         raise HTTPBadGateway() | ||||
| @@ -114,7 +119,7 @@ class APIProxy(CoreSysAttributes): | ||||
|                 body=data, status=client.status, content_type=client.content_type | ||||
|             ) | ||||
|  | ||||
|     async def _websocket_client(self): | ||||
|     async def _websocket_client(self) -> ClientWebSocketResponse: | ||||
|         """Initialize a WebSocket API connection.""" | ||||
|         url = f"{self.sys_homeassistant.api_url}/api/websocket" | ||||
|  | ||||
| @@ -142,7 +147,8 @@ class APIProxy(CoreSysAttributes): | ||||
|                 { | ||||
|                     "type": "auth", | ||||
|                     "access_token": self.sys_homeassistant.api.access_token, | ||||
|                 } | ||||
|                 }, | ||||
|                 dumps=json_dumps, | ||||
|             ) | ||||
|  | ||||
|             data = await client.receive_json() | ||||
| @@ -167,6 +173,28 @@ class APIProxy(CoreSysAttributes): | ||||
|  | ||||
|         raise APIError() | ||||
|  | ||||
|     async def _proxy_message( | ||||
|         self, | ||||
|         read_task: asyncio.Task, | ||||
|         target: web.WebSocketResponse | ClientWebSocketResponse, | ||||
|     ) -> None: | ||||
|         """Proxy a message from client to server or vice versa.""" | ||||
|         if read_task.exception(): | ||||
|             raise read_task.exception() | ||||
|  | ||||
|         msg: WSMessage = read_task.result() | ||||
|         if msg.type == WSMsgType.TEXT: | ||||
|             return await target.send_str(msg.data) | ||||
|         if msg.type == WSMsgType.BINARY: | ||||
|             return await target.send_bytes(msg.data) | ||||
|         if msg.type == WSMsgType.CLOSE: | ||||
|             _LOGGER.debug("Received close message from WebSocket.") | ||||
|             return await target.close() | ||||
|  | ||||
|         raise TypeError( | ||||
|             f"Cannot proxy websocket message of unsupported type: {msg.type}" | ||||
|         ) | ||||
|  | ||||
|     async def websocket(self, request: web.Request): | ||||
|         """Initialize a WebSocket API connection.""" | ||||
|         if not await self.sys_homeassistant.api.check_api_state(): | ||||
| @@ -176,11 +204,13 @@ class APIProxy(CoreSysAttributes): | ||||
|         # init server | ||||
|         server = web.WebSocketResponse(heartbeat=30) | ||||
|         await server.prepare(request) | ||||
|         addon_name = None | ||||
|  | ||||
|         # handle authentication | ||||
|         try: | ||||
|             await server.send_json( | ||||
|                 {"type": "auth_required", "ha_version": self.sys_homeassistant.version} | ||||
|                 {"type": "auth_required", "ha_version": self.sys_homeassistant.version}, | ||||
|                 dumps=json_dumps, | ||||
|             ) | ||||
|  | ||||
|             # Check API access | ||||
| @@ -193,14 +223,17 @@ class APIProxy(CoreSysAttributes): | ||||
|             if not addon or not addon.access_homeassistant_api: | ||||
|                 _LOGGER.warning("Unauthorized WebSocket access!") | ||||
|                 await server.send_json( | ||||
|                     {"type": "auth_invalid", "message": "Invalid access"} | ||||
|                     {"type": "auth_invalid", "message": "Invalid access"}, | ||||
|                     dumps=json_dumps, | ||||
|                 ) | ||||
|                 return server | ||||
|  | ||||
|             _LOGGER.info("WebSocket access from %s", addon.slug) | ||||
|             addon_name = addon.slug | ||||
|             _LOGGER.info("WebSocket access from %s", addon_name) | ||||
|  | ||||
|             await server.send_json( | ||||
|                 {"type": "auth_ok", "ha_version": self.sys_homeassistant.version} | ||||
|                 {"type": "auth_ok", "ha_version": self.sys_homeassistant.version}, | ||||
|                 dumps=json_dumps, | ||||
|             ) | ||||
|         except (RuntimeError, ValueError) as err: | ||||
|             _LOGGER.error("Can't initialize handshake: %s", err) | ||||
| @@ -214,13 +247,13 @@ class APIProxy(CoreSysAttributes): | ||||
|  | ||||
|         _LOGGER.info("Home Assistant WebSocket API request running") | ||||
|         try: | ||||
|             client_read = None | ||||
|             server_read = None | ||||
|             client_read: asyncio.Task | None = None | ||||
|             server_read: asyncio.Task | None = None | ||||
|             while not server.closed and not client.closed: | ||||
|                 if not client_read: | ||||
|                     client_read = self.sys_create_task(client.receive_str()) | ||||
|                     client_read = self.sys_create_task(client.receive()) | ||||
|                 if not server_read: | ||||
|                     server_read = self.sys_create_task(server.receive_str()) | ||||
|                     server_read = self.sys_create_task(server.receive()) | ||||
|  | ||||
|                 # wait until data need to be processed | ||||
|                 await asyncio.wait( | ||||
| @@ -229,14 +262,12 @@ class APIProxy(CoreSysAttributes): | ||||
|  | ||||
|                 # server | ||||
|                 if server_read.done() and not client.closed: | ||||
|                     server_read.exception() | ||||
|                     await client.send_str(server_read.result()) | ||||
|                     await self._proxy_message(server_read, client) | ||||
|                     server_read = None | ||||
|  | ||||
|                 # client | ||||
|                 if client_read.done() and not server.closed: | ||||
|                     client_read.exception() | ||||
|                     await server.send_str(client_read.result()) | ||||
|                     await self._proxy_message(client_read, server) | ||||
|                     client_read = None | ||||
|  | ||||
|         except asyncio.CancelledError: | ||||
| @@ -246,9 +277,9 @@ class APIProxy(CoreSysAttributes): | ||||
|             _LOGGER.info("Home Assistant WebSocket API error: %s", err) | ||||
|  | ||||
|         finally: | ||||
|             if client_read: | ||||
|             if client_read and not client_read.done(): | ||||
|                 client_read.cancel() | ||||
|             if server_read: | ||||
|             if server_read and not server_read.done(): | ||||
|                 server_read.cancel() | ||||
|  | ||||
|             # close connections | ||||
| @@ -257,5 +288,5 @@ class APIProxy(CoreSysAttributes): | ||||
|             if not server.closed: | ||||
|                 await server.close() | ||||
|  | ||||
|         _LOGGER.info("Home Assistant WebSocket API connection is closed") | ||||
|         _LOGGER.info("Home Assistant WebSocket API for %s closed", addon_name) | ||||
|         return server | ||||
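|  | ||||
| # How an add-on consumes this proxy (a sketch; assumes the add-on has Home | ||||
| # Assistant API access, the usual SUPERVISOR_TOKEN variable, and this ws path): | ||||
| import os | ||||
|  | ||||
| import aiohttp | ||||
|  | ||||
|  | ||||
| async def ha_ws_handshake() -> None: | ||||
|     """Authenticate against the proxied Home Assistant WebSocket API.""" | ||||
|     async with aiohttp.ClientSession() as http: | ||||
|         async with http.ws_connect("ws://supervisor/core/websocket") as ws: | ||||
|             await ws.receive_json()  # {"type": "auth_required", ...} | ||||
|             await ws.send_json( | ||||
|                 {"type": "auth", "access_token": os.environ["SUPERVISOR_TOKEN"]} | ||||
|             ) | ||||
|             assert (await ws.receive_json())["type"] == "auth_ok" | ||||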
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Handle REST API for resoulution.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from typing import Any | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Root RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| import logging | ||||
| from typing import Any | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Security RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| import logging | ||||
| from typing import Any | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Home Assistant RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from typing import Any | ||||
| @@ -6,7 +7,7 @@ from typing import Any | ||||
| from aiohttp import web | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ..addons import AnyAddon | ||||
| from ..addons.manager import AnyAddon | ||||
| from ..addons.utils import rating_security | ||||
| from ..api.const import ATTR_SIGNED | ||||
| from ..api.utils import api_process, api_process_raw, api_validate | ||||
| @@ -186,18 +187,20 @@ class APIStore(CoreSysAttributes): | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
|     async def addons_list(self, request: web.Request) -> list[dict[str, Any]]: | ||||
|     async def addons_list(self, request: web.Request) -> dict[str, Any]: | ||||
|         """Return all store add-ons.""" | ||||
|         return [ | ||||
|             self._generate_addon_information(self.sys_addons.store[addon]) | ||||
|             for addon in self.sys_addons.store | ||||
|         ] | ||||
|         return { | ||||
|             ATTR_ADDONS: [ | ||||
|                 self._generate_addon_information(self.sys_addons.store[addon]) | ||||
|                 for addon in self.sys_addons.store | ||||
|             ] | ||||
|         } | ||||
|  | ||||
|     @api_process | ||||
|     def addons_addon_install(self, request: web.Request) -> Awaitable[None]: | ||||
|         """Install add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         return asyncio.shield(addon.install()) | ||||
|         return asyncio.shield(self.sys_addons.install(addon.slug)) | ||||
|  | ||||
|     @api_process | ||||
|     async def addons_addon_update(self, request: web.Request) -> None: | ||||
| @@ -209,7 +212,7 @@ class APIStore(CoreSysAttributes): | ||||
|         body = await api_validate(SCHEMA_UPDATE, request) | ||||
|  | ||||
|         if start_task := await asyncio.shield( | ||||
|             addon.update(backup=body.get(ATTR_BACKUP)) | ||||
|             self.sys_addons.update(addon.slug, backup=body.get(ATTR_BACKUP)) | ||||
|         ): | ||||
|             await start_task | ||||
|  | ||||
| @@ -247,9 +250,14 @@ class APIStore(CoreSysAttributes): | ||||
|     @api_process_raw(CONTENT_TYPE_TEXT) | ||||
|     async def addons_addon_changelog(self, request: web.Request) -> str: | ||||
|         """Return changelog from add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         # Frontend can't handle error response here, need to return 200 and error as text for now | ||||
|         try: | ||||
|             addon = self._extract_addon(request) | ||||
|         except APIError as err: | ||||
|             return str(err) | ||||
|  | ||||
|         if not addon.with_changelog: | ||||
|             raise APIError(f"No changelog found for add-on {addon.slug}!") | ||||
|             return f"No changelog found for add-on {addon.slug}!" | ||||
|  | ||||
|         with addon.path_changelog.open("r") as changelog: | ||||
|             return changelog.read() | ||||
| @@ -257,9 +265,14 @@ class APIStore(CoreSysAttributes): | ||||
|     @api_process_raw(CONTENT_TYPE_TEXT) | ||||
|     async def addons_addon_documentation(self, request: web.Request) -> str: | ||||
|         """Return documentation from add-on.""" | ||||
|         addon = self._extract_addon(request) | ||||
|         # Frontend can't handle error response here, need to return 200 and error as text for now | ||||
|         try: | ||||
|             addon = self._extract_addon(request) | ||||
|         except APIError as err: | ||||
|             return str(err) | ||||
|  | ||||
|         if not addon.with_documentation: | ||||
|             raise APIError(f"No documentation found for add-on {addon.slug}!") | ||||
|             return f"No documentation found for add-on {addon.slug}!" | ||||
|  | ||||
|         with addon.path_documentation.open("r") as documentation: | ||||
|             return documentation.read() | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor Supervisor RESTful API.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| @@ -49,7 +50,7 @@ from ..store.validate import repositories | ||||
| from ..utils.sentry import close_sentry, init_sentry | ||||
| from ..utils.validate import validate_timezone | ||||
| from ..validate import version_tag, wait_boot | ||||
| from .const import CONTENT_TYPE_BINARY | ||||
| from .const import CONTENT_TYPE_TEXT | ||||
| from .utils import api_process, api_process_raw, api_validate | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
| @@ -140,7 +141,7 @@ class APISupervisor(CoreSysAttributes): | ||||
|  | ||||
|         if ATTR_DIAGNOSTICS in body: | ||||
|             self.sys_config.diagnostics = body[ATTR_DIAGNOSTICS] | ||||
|             self.sys_dbus.agent.diagnostics = body[ATTR_DIAGNOSTICS] | ||||
|             await self.sys_dbus.agent.set_diagnostics(body[ATTR_DIAGNOSTICS]) | ||||
|  | ||||
|             if body[ATTR_DIAGNOSTICS]: | ||||
|                 init_sentry(self.coresys) | ||||
| @@ -229,7 +230,7 @@ class APISupervisor(CoreSysAttributes): | ||||
|         """Soft restart Supervisor.""" | ||||
|         return asyncio.shield(self.sys_supervisor.restart()) | ||||
|  | ||||
|     @api_process_raw(CONTENT_TYPE_BINARY) | ||||
|     @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT) | ||||
|     def logs(self, request: web.Request) -> Awaitable[bytes]: | ||||
|         """Return supervisor Docker logs.""" | ||||
|         return self.sys_supervisor.logs() | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Init file for Supervisor util for RESTful API.""" | ||||
|  | ||||
| import json | ||||
| from typing import Any | ||||
|  | ||||
| @@ -13,6 +14,7 @@ from ..const import ( | ||||
|     HEADER_TOKEN, | ||||
|     HEADER_TOKEN_OLD, | ||||
|     JSON_DATA, | ||||
|     JSON_JOB_ID, | ||||
|     JSON_MESSAGE, | ||||
|     JSON_RESULT, | ||||
|     REQUEST_FROM, | ||||
| @@ -22,9 +24,9 @@ from ..const import ( | ||||
| from ..coresys import CoreSys | ||||
| from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError | ||||
| from ..utils import check_exception_chain, get_message_from_exception_chain | ||||
| from ..utils.json import JSONEncoder | ||||
| from ..utils.json import json_dumps, json_loads as json_loads_util | ||||
| from ..utils.log_format import format_message | ||||
| from .const import CONTENT_TYPE_BINARY | ||||
| from . import const | ||||
|  | ||||
|  | ||||
| def excract_supervisor_token(request: web.Request) -> str | None: | ||||
| @@ -48,7 +50,7 @@ def json_loads(data: Any) -> dict[str, Any]: | ||||
|     if not data: | ||||
|         return {} | ||||
|     try: | ||||
|         return json.loads(data) | ||||
|         return json_loads_util(data) | ||||
|     except json.JSONDecodeError as err: | ||||
|         raise APIError("Invalid json") from err | ||||
|  | ||||
| @@ -90,7 +92,7 @@ def require_home_assistant(method): | ||||
|     return wrap_api | ||||
|  | ||||
|  | ||||
| def api_process_raw(content): | ||||
| def api_process_raw(content, *, error_type=None): | ||||
|     """Wrap content_type into function.""" | ||||
|  | ||||
|     def wrap_method(method): | ||||
| @@ -100,15 +102,15 @@ def api_process_raw(content): | ||||
|             """Return api information.""" | ||||
|             try: | ||||
|                 msg_data = await method(api, *args, **kwargs) | ||||
|                 msg_type = content | ||||
|             except (APIError, APIForbidden) as err: | ||||
|                 msg_data = str(err).encode() | ||||
|                 msg_type = CONTENT_TYPE_BINARY | ||||
|             except HassioError: | ||||
|                 msg_data = b"" | ||||
|                 msg_type = CONTENT_TYPE_BINARY | ||||
|             except HassioError as err: | ||||
|                 return api_return_error( | ||||
|                     err, error_type=error_type or const.CONTENT_TYPE_BINARY | ||||
|                 ) | ||||
|  | ||||
|             return web.Response(body=msg_data, content_type=msg_type) | ||||
|             if isinstance(msg_data, (web.Response, web.StreamResponse)): | ||||
|                 return msg_data | ||||
|  | ||||
|             return web.Response(body=msg_data, content_type=content) | ||||
|  | ||||
|         return wrap_api | ||||
|  | ||||
| @@ -116,21 +118,41 @@ def api_process_raw(content): | ||||
|  | ||||
|  | ||||
| def api_return_error( | ||||
|     error: Exception | None = None, message: str | None = None | ||||
|     error: Exception | None = None, | ||||
|     message: str | None = None, | ||||
|     error_type: str | None = None, | ||||
| ) -> web.Response: | ||||
|     """Return an API error message.""" | ||||
|     if error and not message: | ||||
|         message = get_message_from_exception_chain(error) | ||||
|         if check_exception_chain(error, DockerAPIError): | ||||
|             message = format_message(message) | ||||
|     if not message: | ||||
|         message = "Unknown error, see supervisor" | ||||
|  | ||||
|     status = 400 | ||||
|     if is_api_error := isinstance(error, APIError): | ||||
|         status = error.status | ||||
|  | ||||
|     match error_type: | ||||
|         case const.CONTENT_TYPE_TEXT: | ||||
|             return web.Response(body=message, content_type=error_type, status=status) | ||||
|         case const.CONTENT_TYPE_BINARY: | ||||
|             return web.Response( | ||||
|                 body=message.encode(), content_type=error_type, status=status | ||||
|             ) | ||||
|         case _: | ||||
|             result = { | ||||
|                 JSON_RESULT: RESULT_ERROR, | ||||
|                 JSON_MESSAGE: message, | ||||
|             } | ||||
|             if is_api_error and error.job_id: | ||||
|                 result[JSON_JOB_ID] = error.job_id | ||||
|  | ||||
|     return web.json_response( | ||||
|         { | ||||
|             JSON_RESULT: RESULT_ERROR, | ||||
|             JSON_MESSAGE: message or "Unknown error, see supervisor", | ||||
|         }, | ||||
|         status=400, | ||||
|         dumps=lambda x: json.dumps(x, cls=JSONEncoder), | ||||
|         result, | ||||
|         status=status, | ||||
|         dumps=json_dumps, | ||||
|     ) | ||||
|  | ||||
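| # The three error shapes this now produces (an illustration; the status is 400 | ||||
| # or, for an APIError, the error's own status): | ||||
| # | ||||
| #   error_type=const.CONTENT_TYPE_TEXT   -> plain-text body with the message | ||||
| #   error_type=const.CONTENT_TYPE_BINARY -> the message encoded as bytes | ||||
| #   default -> JSON {"result": "error", "message": ..., "job_id" if present} | ||||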
|  | ||||
| @@ -138,7 +160,7 @@ def api_return_ok(data: dict[str, Any] | None = None) -> web.Response: | ||||
|     """Return an API ok answer.""" | ||||
|     return web.json_response( | ||||
|         {JSON_RESULT: RESULT_OK, JSON_DATA: data or {}}, | ||||
|         dumps=lambda x: json.dumps(x, cls=JSONEncoder), | ||||
|         dumps=json_dumps, | ||||
|     ) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Handle Arch for underlay maschine/platforms.""" | ||||
|  | ||||
| import logging | ||||
| from pathlib import Path | ||||
| import platform | ||||
| @@ -28,6 +29,7 @@ class CpuArch(CoreSysAttributes): | ||||
|         """Initialize CPU Architecture handler.""" | ||||
|         self.coresys = coresys | ||||
|         self._supported_arch: list[str] = [] | ||||
|         self._supported_set: set[str] = set() | ||||
|         self._default_arch: str | ||||
|  | ||||
|     @property | ||||
| @@ -70,9 +72,11 @@ class CpuArch(CoreSysAttributes): | ||||
|         if native_support not in self._supported_arch: | ||||
|             self._supported_arch.append(native_support) | ||||
|  | ||||
|         self._supported_set = set(self._supported_arch) | ||||
|  | ||||
|     def is_supported(self, arch_list: list[str]) -> bool: | ||||
|         """Return True if there is a supported arch by this platform.""" | ||||
|         return not set(self.supported).isdisjoint(set(arch_list)) | ||||
|         return not self._supported_set.isdisjoint(arch_list) | ||||
|  | ||||
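|     # The cached set only changes where the set is built; the check itself is | ||||
|     # still the plain disjoint test (standalone illustration): | ||||
|     # | ||||
|     #   supported = {"aarch64", "armv7"} | ||||
|     #   not supported.isdisjoint(["aarch64", "amd64"])  -> True, supported | ||||
|     #   not supported.isdisjoint(["amd64", "i386"])     -> False, unsupported | ||||
|  | ||||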
|     def match(self, arch_list: list[str]) -> str: | ||||
|         """Return best match for this CPU/Platform.""" | ||||
|   | ||||
| @@ -1,12 +1,20 @@ | ||||
| """Manage SSO for Add-ons with Home Assistant user.""" | ||||
|  | ||||
| import asyncio | ||||
| import hashlib | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from .addons.addon import Addon | ||||
| from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH | ||||
| from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_TYPE, ATTR_USERNAME, FILE_HASSIO_AUTH | ||||
| from .coresys import CoreSys, CoreSysAttributes | ||||
| from .exceptions import AuthError, AuthPasswordResetError, HomeAssistantAPIError | ||||
| from .exceptions import ( | ||||
|     AuthError, | ||||
|     AuthListUsersError, | ||||
|     AuthPasswordResetError, | ||||
|     HomeAssistantAPIError, | ||||
|     HomeAssistantWSError, | ||||
| ) | ||||
| from .utils.common import FileConfiguration | ||||
| from .validate import SCHEMA_AUTH_CONFIG | ||||
|  | ||||
| @@ -132,6 +140,17 @@ class Auth(FileConfiguration, CoreSysAttributes): | ||||
|  | ||||
|         raise AuthPasswordResetError() | ||||
|  | ||||
|     async def list_users(self) -> list[dict[str, Any]]: | ||||
|         """List users on the Home Assistant instance.""" | ||||
|         try: | ||||
|             return await self.sys_homeassistant.websocket.async_send_command( | ||||
|                 {ATTR_TYPE: "config/auth/list"} | ||||
|             ) | ||||
|         except HomeAssistantWSError: | ||||
|             _LOGGER.error("Can't request listing users on Home Assistant!") | ||||
|  | ||||
|         raise AuthListUsersError() | ||||
|  | ||||
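|     # Hypothetical, abridged shape of what Core returns for "config/auth/list" | ||||
|     # (the exact fields are Core's, not guaranteed here): | ||||
|     # | ||||
|     #   [{"id": "a1b2...", "username": "jdoe", "name": "John Doe", | ||||
|     #     "is_active": True}] | ||||
|  | ||||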
|     @staticmethod | ||||
|     def _rehash(value: str, salt2: str = "") -> str: | ||||
|         """Rehash a value.""" | ||||
|   | ||||
| @@ -1,13 +1,19 @@ | ||||
| """Representation of a backup file.""" | ||||
|  | ||||
| import asyncio | ||||
| from base64 import b64decode, b64encode | ||||
| from collections import defaultdict | ||||
| from collections.abc import Awaitable | ||||
| from copy import deepcopy | ||||
| from datetime import timedelta | ||||
| from functools import cached_property | ||||
| import io | ||||
| import json | ||||
| import logging | ||||
| from pathlib import Path | ||||
| import tarfile | ||||
| from tempfile import TemporaryDirectory | ||||
| import time | ||||
| from typing import Any | ||||
|  | ||||
| from awesomeversion import AwesomeVersion, AwesomeVersionCompareException | ||||
| @@ -18,13 +24,14 @@ from securetar import SecureTarFile, atomic_contents_add, secure_path | ||||
| import voluptuous as vol | ||||
| from voluptuous.humanize import humanize_error | ||||
|  | ||||
| from ..addons import Addon | ||||
| from ..addons.manager import Addon | ||||
| from ..const import ( | ||||
|     ATTR_ADDONS, | ||||
|     ATTR_COMPRESSED, | ||||
|     ATTR_CRYPTO, | ||||
|     ATTR_DATE, | ||||
|     ATTR_DOCKER, | ||||
|     ATTR_EXCLUDE_DATABASE, | ||||
|     ATTR_FOLDERS, | ||||
|     ATTR_HOMEASSISTANT, | ||||
|     ATTR_NAME, | ||||
| @@ -40,11 +47,14 @@ from ..const import ( | ||||
|     ATTR_VERSION, | ||||
|     CRYPTO_AES128, | ||||
| ) | ||||
| from ..coresys import CoreSys, CoreSysAttributes | ||||
| from ..exceptions import AddonsError, BackupError | ||||
| from ..coresys import CoreSys | ||||
| from ..exceptions import AddonsError, BackupError, BackupInvalidError | ||||
| from ..jobs.const import JOB_GROUP_BACKUP | ||||
| from ..jobs.decorator import Job | ||||
| from ..jobs.job_group import JobGroup | ||||
| from ..utils import remove_folder | ||||
| from ..utils.dt import parse_datetime, utcnow | ||||
| from ..utils.json import write_json_file | ||||
| from ..utils.json import json_bytes | ||||
| from .const import BUF_SIZE, BackupType | ||||
| from .utils import key_to_iv, password_to_key | ||||
| from .validate import SCHEMA_BACKUP | ||||
| @@ -52,15 +62,25 @@ from .validate import SCHEMA_BACKUP | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| class Backup(CoreSysAttributes): | ||||
| class Backup(JobGroup): | ||||
|     """A single Supervisor backup.""" | ||||
|  | ||||
|     def __init__(self, coresys: CoreSys, tar_file: Path): | ||||
|     def __init__( | ||||
|         self, | ||||
|         coresys: CoreSys, | ||||
|         tar_file: Path, | ||||
|         slug: str, | ||||
|         data: dict[str, Any] | None = None, | ||||
|     ): | ||||
|         """Initialize a backup.""" | ||||
|         self.coresys: CoreSys = coresys | ||||
|         super().__init__( | ||||
|             coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug | ||||
|         ) | ||||
|         self._tarfile: Path = tar_file | ||||
|         self._data: dict[str, Any] = {} | ||||
|         self._data: dict[str, Any] = data or {ATTR_SLUG: slug} | ||||
|         self._tmp = None | ||||
|         self._outer_secure_tarfile: SecureTarFile | None = None | ||||
|         self._outer_secure_tarfile_tarfile: tarfile.TarFile | None = None | ||||
|         self._key: bytes | None = None | ||||
|         self._aes: Cipher | None = None | ||||
|  | ||||
| @@ -85,7 +105,7 @@ class Backup(CoreSysAttributes): | ||||
|         return self._data[ATTR_NAME] | ||||
|  | ||||
|     @property | ||||
|     def date(self): | ||||
|     def date(self) -> str: | ||||
|         """Return backup date.""" | ||||
|         return self._data[ATTR_DATE] | ||||
|  | ||||
| @@ -100,39 +120,46 @@ class Backup(CoreSysAttributes): | ||||
|         return self._data[ATTR_COMPRESSED] | ||||
|  | ||||
|     @property | ||||
|     def addons(self): | ||||
|     def addons(self) -> list[dict[str, Any]]: | ||||
|         """Return backup date.""" | ||||
|         return self._data[ATTR_ADDONS] | ||||
|  | ||||
|     @property | ||||
|     def addon_list(self): | ||||
|     def addon_list(self) -> list[str]: | ||||
|         """Return a list of add-ons slugs.""" | ||||
|         return [addon_data[ATTR_SLUG] for addon_data in self.addons] | ||||
|  | ||||
|     @property | ||||
|     def folders(self): | ||||
|     def folders(self) -> list[str]: | ||||
|         """Return list of saved folders.""" | ||||
|         return self._data[ATTR_FOLDERS] | ||||
|  | ||||
|     @property | ||||
|     def repositories(self): | ||||
|     def repositories(self) -> list[str]: | ||||
|         """Return backup date.""" | ||||
|         return self._data[ATTR_REPOSITORIES] | ||||
|  | ||||
|     @repositories.setter | ||||
|     def repositories(self, value): | ||||
|     def repositories(self, value: list[str]) -> None: | ||||
|         """Set backup date.""" | ||||
|         self._data[ATTR_REPOSITORIES] = value | ||||
|  | ||||
|     @property | ||||
|     def homeassistant_version(self): | ||||
|     def homeassistant_version(self) -> AwesomeVersion: | ||||
|         """Return backup Home Assistant version.""" | ||||
|         if self.homeassistant is None: | ||||
|             return None | ||||
|         return self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] | ||||
|         return self.homeassistant[ATTR_VERSION] | ||||
|  | ||||
|     @property | ||||
|     def homeassistant(self): | ||||
|     def homeassistant_exclude_database(self) -> bool: | ||||
|         """Return whether database was excluded from Home Assistant backup.""" | ||||
|         if self.homeassistant is None: | ||||
|             return None | ||||
|         return self.homeassistant[ATTR_EXCLUDE_DATABASE] | ||||
|  | ||||
|     @property | ||||
|     def homeassistant(self) -> dict[str, Any]: | ||||
|         """Return backup Home Assistant data.""" | ||||
|         return self._data[ATTR_HOMEASSISTANT] | ||||
|  | ||||
| @@ -142,12 +169,12 @@ class Backup(CoreSysAttributes): | ||||
|         return self._data[ATTR_SUPERVISOR_VERSION] | ||||
|  | ||||
|     @property | ||||
|     def docker(self): | ||||
|     def docker(self) -> dict[str, Any]: | ||||
|         """Return backup Docker config data.""" | ||||
|         return self._data.get(ATTR_DOCKER, {}) | ||||
|  | ||||
|     @docker.setter | ||||
|     def docker(self, value): | ||||
|     def docker(self, value: dict[str, Any]) -> None: | ||||
|         """Set the Docker config data.""" | ||||
|         self._data[ATTR_DOCKER] = value | ||||
|  | ||||
| @@ -160,32 +187,36 @@ class Backup(CoreSysAttributes): | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def size(self): | ||||
|     def size(self) -> float: | ||||
|         """Return backup size.""" | ||||
|         if not self.tarfile.is_file(): | ||||
|             return 0 | ||||
|         return round(self.tarfile.stat().st_size / 1048576, 2)  # calc mbyte | ||||
|  | ||||
|     @property | ||||
|     def is_new(self): | ||||
|     def is_new(self) -> bool: | ||||
|         """Return True if there is new.""" | ||||
|         return not self.tarfile.exists() | ||||
|  | ||||
|     @property | ||||
|     def tarfile(self): | ||||
|     def tarfile(self) -> Path: | ||||
|         """Return path to backup tarfile.""" | ||||
|         return self._tarfile | ||||
|  | ||||
|     @property | ||||
|     def is_current(self): | ||||
|     def is_current(self) -> bool: | ||||
|         """Return true if backup is current, false if stale.""" | ||||
|         return parse_datetime(self.date) >= utcnow() - timedelta( | ||||
|             days=self.sys_backups.days_until_stale | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def data(self) -> dict[str, Any]: | ||||
|         """Returns a copy of the data.""" | ||||
|         return deepcopy(self._data) | ||||
|  | ||||
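# --- Illustrative aside (editor's sketch) ---
# The data property above returns deepcopy(self._data) so callers can
# mutate the returned dict without corrupting the backup's metadata:
from copy import deepcopy

_data = {"addons": [{"slug": "core_ssh"}]}
snapshot = deepcopy(_data)
snapshot["addons"].append({"slug": "injected"})
assert _data["addons"] == [{"slug": "core_ssh"}]  # original is untouched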
|     def new( | ||||
|         self, | ||||
|         slug: str, | ||||
|         name: str, | ||||
|         date: str, | ||||
|         sys_type: BackupType, | ||||
| @@ -195,7 +226,6 @@ class Backup(CoreSysAttributes): | ||||
|         """Initialize a new backup.""" | ||||
|         # Init metadata | ||||
|         self._data[ATTR_VERSION] = 2 | ||||
|         self._data[ATTR_SLUG] = slug | ||||
|         self._data[ATTR_NAME] = name | ||||
|         self._data[ATTR_DATE] = date | ||||
|         self._data[ATTR_TYPE] = sys_type | ||||
| @@ -296,25 +326,55 @@ class Backup(CoreSysAttributes): | ||||
|  | ||||
|     async def __aenter__(self): | ||||
|         """Async context to open a backup.""" | ||||
|         self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent)) | ||||
|  | ||||
|         # create a backup | ||||
|         if not self.tarfile.is_file(): | ||||
|             return self | ||||
|             self._outer_secure_tarfile = SecureTarFile( | ||||
|                 self.tarfile, | ||||
|                 "w", | ||||
|                 gzip=False, | ||||
|                 bufsize=BUF_SIZE, | ||||
|             ) | ||||
|             self._outer_secure_tarfile_tarfile = self._outer_secure_tarfile.__enter__() | ||||
|             return | ||||
|  | ||||
|         # extract an existing backup | ||||
|         self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent)) | ||||
|  | ||||
|         def _extract_backup(): | ||||
|             """Extract a backup.""" | ||||
|             with tarfile.open(self.tarfile, "r:") as tar: | ||||
|                 tar.extractall(path=self._tmp.name, members=secure_path(tar)) | ||||
|                 tar.extractall( | ||||
|                     path=self._tmp.name, | ||||
|                     members=secure_path(tar), | ||||
|                     filter="fully_trusted", | ||||
|                 ) | ||||
|  | ||||
|         await self.sys_run_in_executor(_extract_backup) | ||||
|  | ||||
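# --- Illustrative aside (editor's sketch) ---
# Python 3.12 introduced tarfile extraction filters; filter="fully_trusted"
# keeps the old permissive behaviour (and silences the deprecation warning),
# which is reasonable here only because the member list was already vetted
# by securetar's secure_path(). A minimal standalone form:
import tarfile

def extract_trusted(archive: str, dest: str) -> None:
    """Extract a pre-vetted archive without the restrictive 'data' filter."""
    with tarfile.open(archive, "r:") as tar:
        tar.extractall(path=dest, filter="fully_trusted")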
|     async def __aexit__(self, exception_type, exception_value, traceback): | ||||
|         """Async context to close a backup.""" | ||||
|         # backup exists, or an exception occurred during build | ||||
|         if self.tarfile.is_file() or exception_type is not None: | ||||
|             self._tmp.cleanup() | ||||
|         try: | ||||
|             await self._aexit(exception_type, exception_value, traceback) | ||||
|         finally: | ||||
|             if self._tmp: | ||||
|                 self._tmp.cleanup() | ||||
|             if self._outer_secure_tarfile: | ||||
|                 self._outer_secure_tarfile.__exit__( | ||||
|                     exception_type, exception_value, traceback | ||||
|                 ) | ||||
|                 self._outer_secure_tarfile = None | ||||
|                 self._outer_secure_tarfile_tarfile = None | ||||
|  | ||||
|     async def _aexit(self, exception_type, exception_value, traceback): | ||||
|         """Cleanup after backup creation. | ||||
|  | ||||
|         This is a separate method to allow it to be called from __aexit__ to ensure | ||||
|         that cleanup is always performed, even if an exception is raised. | ||||
|         """ | ||||
|         # If we're not creating a new backup, or if an exception was raised, we're done | ||||
|         if not self._outer_secure_tarfile or exception_type is not None: | ||||
|             return | ||||
|  | ||||
|         # validate data | ||||
| @@ -327,157 +387,254 @@ class Backup(CoreSysAttributes): | ||||
|             raise ValueError("Invalid config") from None | ||||
|  | ||||
|         # new backup, build it | ||||
|         def _create_backup(): | ||||
|         def _add_backup_json(): | ||||
|             """Create a new backup.""" | ||||
|             with tarfile.open(self.tarfile, "w:") as tar: | ||||
|                 tar.add(self._tmp.name, arcname=".") | ||||
|             raw_bytes = json_bytes(self._data) | ||||
|             fileobj = io.BytesIO(raw_bytes) | ||||
|             tar_info = tarfile.TarInfo(name="./backup.json") | ||||
|             tar_info.size = len(raw_bytes) | ||||
|             tar_info.mtime = int(time.time()) | ||||
|             self._outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj) | ||||
|  | ||||
|         try: | ||||
|             write_json_file(Path(self._tmp.name, "backup.json"), self._data) | ||||
|             await self.sys_run_in_executor(_create_backup) | ||||
|             await self.sys_run_in_executor(_add_backup_json) | ||||
|         except (OSError, json.JSONDecodeError) as err: | ||||
|             self.sys_jobs.current.capture_error(BackupError("Can't write backup")) | ||||
|             _LOGGER.error("Can't write backup: %s", err) | ||||
|         finally: | ||||
|             self._tmp.cleanup() | ||||
|  | ||||
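# --- Illustrative aside (editor's sketch) ---
# The _add_backup_json() pattern above: stream an in-memory JSON document
# straight into an open tar archive via TarInfo + BytesIO, with no temporary
# directory. json.dumps().encode() stands in for Supervisor's json_bytes.
import io
import json
import tarfile
import time

def add_json_member(tar: tarfile.TarFile, name: str, data: dict) -> None:
    raw = json.dumps(data).encode("utf-8")
    info = tarfile.TarInfo(name=name)
    info.size = len(raw)          # size must be set before addfile()
    info.mtime = int(time.time())
    tar.addfile(info, fileobj=io.BytesIO(raw))

with tarfile.open("example.tar", "w:") as tar:
    add_json_member(tar, "./backup.json", {"slug": "demo", "version": 2})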
|     async def store_addons(self, addon_list: list[str]) -> list[Awaitable[None]]: | ||||
|     @Job(name="backup_addon_save", cleanup=False) | ||||
|     async def _addon_save(self, addon: Addon) -> asyncio.Task | None: | ||||
|         """Store an add-on into backup.""" | ||||
|         self.sys_jobs.current.reference = addon.slug | ||||
|  | ||||
|         tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}" | ||||
|  | ||||
|         addon_file = self._outer_secure_tarfile.create_inner_tar( | ||||
|             f"./{tar_name}", | ||||
|             gzip=self.compressed, | ||||
|             key=self._key, | ||||
|         ) | ||||
|         # Take backup | ||||
|         try: | ||||
|             start_task = await addon.backup(addon_file) | ||||
|         except AddonsError as err: | ||||
|             raise BackupError( | ||||
|                 f"Can't create backup for {addon.slug}", _LOGGER.error | ||||
|             ) from err | ||||
|  | ||||
|         # Store to config | ||||
|         self._data[ATTR_ADDONS].append( | ||||
|             { | ||||
|                 ATTR_SLUG: addon.slug, | ||||
|                 ATTR_NAME: addon.name, | ||||
|                 ATTR_VERSION: addon.version, | ||||
|                 ATTR_SIZE: addon_file.size, | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|         return start_task | ||||
|  | ||||
|     @Job(name="backup_store_addons", cleanup=False) | ||||
|     async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]: | ||||
|         """Add a list of add-ons into backup. | ||||
|  | ||||
|         For each addon that needs to be started after backup, returns a task which | ||||
|         For each addon that needs to be started after backup, returns a Task which | ||||
|         completes when that addon has state 'started' (see addon.start). | ||||
|         """ | ||||
|  | ||||
|         async def _addon_save(addon: Addon) -> Awaitable[None] | None: | ||||
|             """Task to store an add-on into backup.""" | ||||
|             tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}" | ||||
|             addon_file = SecureTarFile( | ||||
|                 Path(self._tmp.name, tar_name), | ||||
|                 "w", | ||||
|                 key=self._key, | ||||
|                 gzip=self.compressed, | ||||
|                 bufsize=BUF_SIZE, | ||||
|             ) | ||||
|  | ||||
|             # Take backup | ||||
|             try: | ||||
|                 start_task = await addon.backup(addon_file) | ||||
|             except AddonsError: | ||||
|                 _LOGGER.error("Can't create backup for %s", addon.slug) | ||||
|                 return | ||||
|  | ||||
|             # Store to config | ||||
|             self._data[ATTR_ADDONS].append( | ||||
|                 { | ||||
|                     ATTR_SLUG: addon.slug, | ||||
|                     ATTR_NAME: addon.name, | ||||
|                     ATTR_VERSION: addon.version, | ||||
|                     ATTR_SIZE: addon_file.size, | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|             return start_task | ||||
|  | ||||
|         # Save Add-ons sequential | ||||
|         # avoid issue on slow IO | ||||
|         start_tasks: list[Awaitable[None]] = [] | ||||
|         # Save add-ons sequentially to avoid issues on slow IO | ||||
|         start_tasks: list[asyncio.Task] = [] | ||||
|         for addon in addon_list: | ||||
|             try: | ||||
|                 if start_task := await _addon_save(addon): | ||||
|                 if start_task := await self._addon_save(addon): | ||||
|                     start_tasks.append(start_task) | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 _LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err) | ||||
|  | ||||
|         return start_tasks | ||||
|  | ||||
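# --- Illustrative aside (editor's sketch) ---
# The loop above deliberately awaits one add-on at a time (no gather),
# sparing slow IO, while the walrus operator collects the optional start
# tasks. A toy version of the same control flow:
import asyncio

async def save(n: int) -> asyncio.Task | None:
    await asyncio.sleep(0)  # stands in for the real backup work
    return asyncio.create_task(asyncio.sleep(0)) if n % 2 else None

async def main() -> None:
    start_tasks: list[asyncio.Task] = []
    for n in range(4):
        if start_task := await save(n):  # sequential, not concurrent
            start_tasks.append(start_task)
    await asyncio.gather(*start_tasks, return_exceptions=True)

asyncio.run(main())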
|     async def restore_addons(self, addon_list: list[str]) -> list[Awaitable[None]]: | ||||
|     @Job(name="backup_addon_restore", cleanup=False) | ||||
|     async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None: | ||||
|         """Restore an add-on from backup.""" | ||||
|         self.sys_jobs.current.reference = addon_slug | ||||
|  | ||||
|         tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}" | ||||
|         addon_file = SecureTarFile( | ||||
|             Path(self._tmp.name, tar_name), | ||||
|             "r", | ||||
|             key=self._key, | ||||
|             gzip=self.compressed, | ||||
|             bufsize=BUF_SIZE, | ||||
|         ) | ||||
|  | ||||
|         # Ensure the file exists inside the backup | ||||
|         if not addon_file.path.exists(): | ||||
|             raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error) | ||||
|  | ||||
|         # Perform a restore | ||||
|         try: | ||||
|             return await self.sys_addons.restore(addon_slug, addon_file) | ||||
|         except AddonsError as err: | ||||
|             raise BackupError( | ||||
|                 f"Can't restore backup {addon_slug}", _LOGGER.error | ||||
|             ) from err | ||||
|  | ||||
|     @Job(name="backup_restore_addons", cleanup=False) | ||||
|     async def restore_addons( | ||||
|         self, addon_list: list[str] | ||||
|     ) -> tuple[bool, list[asyncio.Task]]: | ||||
|         """Restore a list add-on from backup.""" | ||||
|  | ||||
|         async def _addon_restore(addon_slug: str) -> Awaitable[None] | None: | ||||
|             """Task to restore an add-on into backup.""" | ||||
|             tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}" | ||||
|             addon_file = SecureTarFile( | ||||
|                 Path(self._tmp.name, tar_name), | ||||
|                 "r", | ||||
|                 key=self._key, | ||||
|                 gzip=self.compressed, | ||||
|                 bufsize=BUF_SIZE, | ||||
|             ) | ||||
|  | ||||
|             # If exists inside backup | ||||
|             if not addon_file.path.exists(): | ||||
|                 _LOGGER.error("Can't find backup %s", addon_slug) | ||||
|                 return | ||||
|  | ||||
|             # Perform a restore | ||||
|             try: | ||||
|                 return await self.sys_addons.restore(addon_slug, addon_file) | ||||
|             except AddonsError: | ||||
|                 _LOGGER.error("Can't restore backup %s", addon_slug) | ||||
|  | ||||
|         # Save Add-ons sequential | ||||
|         # avoid issue on slow IO | ||||
|         start_tasks: list[Awaitable[None]] = [] | ||||
|         # Restore add-ons sequentially to avoid issues on slow IO | ||||
|         start_tasks: list[asyncio.Task] = [] | ||||
|         success = True | ||||
|         for slug in addon_list: | ||||
|             try: | ||||
|                 if start_task := await _addon_restore(slug): | ||||
|                     start_tasks.append(start_task) | ||||
|                 start_task = await self._addon_restore(slug) | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 _LOGGER.warning("Can't restore Add-on %s: %s", slug, err) | ||||
|                 success = False | ||||
|             else: | ||||
|                 if start_task: | ||||
|                     start_tasks.append(start_task) | ||||
|  | ||||
|         return start_tasks | ||||
|         return (success, start_tasks) | ||||
|  | ||||
|     @Job(name="backup_remove_delta_addons", cleanup=False) | ||||
|     async def remove_delta_addons(self) -> bool: | ||||
|         """Remove addons which are not in this backup.""" | ||||
|         success = True | ||||
|         for addon in self.sys_addons.installed: | ||||
|             if addon.slug in self.addon_list: | ||||
|                 continue | ||||
|  | ||||
|             # Remove the add-on because it's not part of the new environment | ||||
|             # Do it sequentially to avoid issues on slow IO | ||||
|             try: | ||||
|                 await self.sys_addons.uninstall(addon.slug) | ||||
|             except AddonsError as err: | ||||
|                 self.sys_jobs.current.capture_error(err) | ||||
|                 _LOGGER.warning("Can't uninstall Add-on %s: %s", addon.slug, err) | ||||
|                 success = False | ||||
|  | ||||
|         return success | ||||
|  | ||||
|     @Job(name="backup_folder_save", cleanup=False) | ||||
|     async def _folder_save(self, name: str): | ||||
|         """Take backup of a folder.""" | ||||
|         self.sys_jobs.current.reference = name | ||||
|  | ||||
|         slug_name = name.replace("/", "_") | ||||
|         tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}" | ||||
|         origin_dir = Path(self.sys_config.path_supervisor, name) | ||||
|  | ||||
|         # Check if exists | ||||
|         if not origin_dir.is_dir(): | ||||
|             _LOGGER.warning("Can't find backup folder %s", name) | ||||
|             return | ||||
|  | ||||
|         def _save() -> None: | ||||
|             # Take backup | ||||
|             _LOGGER.info("Backing up folder %s", name) | ||||
|  | ||||
|             with self._outer_secure_tarfile.create_inner_tar( | ||||
|                 f"./{tar_name}", | ||||
|                 gzip=self.compressed, | ||||
|                 key=self._key, | ||||
|             ) as tar_file: | ||||
|                 atomic_contents_add( | ||||
|                     tar_file, | ||||
|                     origin_dir, | ||||
|                     excludes=[ | ||||
|                         bound.bind_mount.local_where.as_posix() | ||||
|                         for bound in self.sys_mounts.bound_mounts | ||||
|                         if bound.bind_mount.local_where | ||||
|                     ], | ||||
|                     arcname=".", | ||||
|                 ) | ||||
|  | ||||
|             _LOGGER.info("Backup folder %s done", name) | ||||
|  | ||||
|         try: | ||||
|             await self.sys_run_in_executor(_save) | ||||
|         except (tarfile.TarError, OSError) as err: | ||||
|             raise BackupError( | ||||
|                 f"Can't backup folder {name}: {str(err)}", _LOGGER.error | ||||
|             ) from err | ||||
|  | ||||
|         self._data[ATTR_FOLDERS].append(name) | ||||
|  | ||||
|     @Job(name="backup_store_folders", cleanup=False) | ||||
|     async def store_folders(self, folder_list: list[str]): | ||||
|         """Backup Supervisor data into backup.""" | ||||
|  | ||||
|         async def _folder_save(name: str): | ||||
|             """Take backup of a folder.""" | ||||
|             slug_name = name.replace("/", "_") | ||||
|             tar_name = Path( | ||||
|                 self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}" | ||||
|             ) | ||||
|             origin_dir = Path(self.sys_config.path_supervisor, name) | ||||
|  | ||||
|             # Check if exists | ||||
|             if not origin_dir.is_dir(): | ||||
|                 _LOGGER.warning("Can't find backup folder %s", name) | ||||
|                 return | ||||
|  | ||||
|             def _save() -> None: | ||||
|                 # Take backup | ||||
|                 _LOGGER.info("Backing up folder %s", name) | ||||
|                 with SecureTarFile( | ||||
|                     tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE | ||||
|                 ) as tar_file: | ||||
|                     atomic_contents_add( | ||||
|                         tar_file, | ||||
|                         origin_dir, | ||||
|                         excludes=[ | ||||
|                             bound.bind_mount.local_where.as_posix() | ||||
|                             for bound in self.sys_mounts.bound_mounts | ||||
|                             if bound.bind_mount.local_where | ||||
|                         ], | ||||
|                         arcname=".", | ||||
|                     ) | ||||
|  | ||||
|                 _LOGGER.info("Backup folder %s done", name) | ||||
|  | ||||
|             await self.sys_run_in_executor(_save) | ||||
|             self._data[ATTR_FOLDERS].append(name) | ||||
|  | ||||
|         # Save folder sequential | ||||
|         # avoid issue on slow IO | ||||
|         # Save folders sequentially to avoid issues on slow IO | ||||
|         for folder in folder_list: | ||||
|             await self._folder_save(folder) | ||||
|  | ||||
|     @Job(name="backup_folder_restore", cleanup=False) | ||||
|     async def _folder_restore(self, name: str) -> None: | ||||
|         """Restore a folder.""" | ||||
|         self.sys_jobs.current.reference = name | ||||
|  | ||||
|         slug_name = name.replace("/", "_") | ||||
|         tar_name = Path( | ||||
|             self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}" | ||||
|         ) | ||||
|         origin_dir = Path(self.sys_config.path_supervisor, name) | ||||
|  | ||||
|         # Check if exists inside backup | ||||
|         if not tar_name.exists(): | ||||
|             raise BackupInvalidError( | ||||
|                 f"Can't find restore folder {name}", _LOGGER.warning | ||||
|             ) | ||||
|  | ||||
|         # Unmount any mounts within folder | ||||
|         bind_mounts = [ | ||||
|             bound.bind_mount | ||||
|             for bound in self.sys_mounts.bound_mounts | ||||
|             if bound.bind_mount.local_where | ||||
|             and bound.bind_mount.local_where.is_relative_to(origin_dir) | ||||
|         ] | ||||
|         if bind_mounts: | ||||
|             await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts]) | ||||
|  | ||||
|         # Clean old stuff | ||||
|         if origin_dir.is_dir(): | ||||
|             await remove_folder(origin_dir, content_only=True) | ||||
|  | ||||
|         # Perform a restore | ||||
|         def _restore() -> bool: | ||||
|             try: | ||||
|                 await _folder_save(folder) | ||||
|                 _LOGGER.info("Restore folder %s", name) | ||||
|                 with SecureTarFile( | ||||
|                     tar_name, | ||||
|                     "r", | ||||
|                     key=self._key, | ||||
|                     gzip=self.compressed, | ||||
|                     bufsize=BUF_SIZE, | ||||
|                 ) as tar_file: | ||||
|                     tar_file.extractall( | ||||
|                         path=origin_dir, members=tar_file, filter="fully_trusted" | ||||
|                     ) | ||||
|                 _LOGGER.info("Restore folder %s done", name) | ||||
|             except (tarfile.TarError, OSError) as err: | ||||
|                 raise BackupError( | ||||
|                     f"Can't backup folder {folder}: {str(err)}", _LOGGER.error | ||||
|                     f"Can't restore folder {name}: {err}", _LOGGER.warning | ||||
|                 ) from err | ||||
|             return True | ||||
|  | ||||
|     async def restore_folders(self, folder_list: list[str]): | ||||
|         try: | ||||
|             return await self.sys_run_in_executor(_restore) | ||||
|         finally: | ||||
|             if bind_mounts: | ||||
|                 await asyncio.gather( | ||||
|                     *[bind_mount.mount() for bind_mount in bind_mounts] | ||||
|                 ) | ||||
|  | ||||
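# --- Illustrative aside (editor's sketch) ---
# The unmount/restore/remount discipline above, reduced to its skeleton:
# whatever the restore work does, the finally block re-mounts everything
# that was taken down, so a failed extraction cannot leave mounts missing.
import asyncio

class FakeMount:
    """Hypothetical stand-in for Supervisor's bind mount objects."""
    def __init__(self, name: str) -> None:
        self.name = name
        self.mounted = True
    async def unmount(self) -> None:
        self.mounted = False
    async def mount(self) -> None:
        self.mounted = True

async def with_mounts_paused(mounts: list[FakeMount], work) -> bool:
    await asyncio.gather(*[m.unmount() for m in mounts])
    try:
        return await work()
    finally:
        await asyncio.gather(*[m.mount() for m in mounts])  # always re-mount

async def main() -> None:
    mounts = [FakeMount("media"), FakeMount("share")]
    print(await with_mounts_paused(mounts, lambda: asyncio.sleep(0, result=True)))
    assert all(m.mounted for m in mounts)

asyncio.run(main())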
|     @Job(name="backup_restore_folders", cleanup=False) | ||||
|     async def restore_folders(self, folder_list: list[str]) -> bool: | ||||
|         """Backup Supervisor data into backup.""" | ||||
|         success = True | ||||
|  | ||||
|         async def _folder_restore(name: str) -> None: | ||||
|         async def _folder_restore(name: str) -> bool: | ||||
|             """Intenal function to restore a folder.""" | ||||
|             slug_name = name.replace("/", "_") | ||||
|             tar_name = Path( | ||||
| @@ -488,14 +645,26 @@ class Backup(CoreSysAttributes): | ||||
|             # Check if exists inside backup | ||||
|             if not tar_name.exists(): | ||||
|                 _LOGGER.warning("Can't find restore folder %s", name) | ||||
|                 return | ||||
|                 return False | ||||
|  | ||||
|             # Unmount any mounts within folder | ||||
|             bind_mounts = [ | ||||
|                 bound.bind_mount | ||||
|                 for bound in self.sys_mounts.bound_mounts | ||||
|                 if bound.bind_mount.local_where | ||||
|                 and bound.bind_mount.local_where.is_relative_to(origin_dir) | ||||
|             ] | ||||
|             if bind_mounts: | ||||
|                 await asyncio.gather( | ||||
|                     *[bind_mount.unmount() for bind_mount in bind_mounts] | ||||
|                 ) | ||||
|  | ||||
|             # Clean old stuff | ||||
|             if origin_dir.is_dir(): | ||||
|                 await remove_folder(origin_dir, content_only=True) | ||||
|  | ||||
|             # Perform a restore | ||||
|             def _restore() -> None: | ||||
|             def _restore() -> bool: | ||||
|                 try: | ||||
|                     _LOGGER.info("Restore folder %s", name) | ||||
|                     with SecureTarFile( | ||||
| @@ -505,40 +674,56 @@ class Backup(CoreSysAttributes): | ||||
|                         gzip=self.compressed, | ||||
|                         bufsize=BUF_SIZE, | ||||
|                     ) as tar_file: | ||||
|                         tar_file.extractall(path=origin_dir, members=tar_file) | ||||
|                         tar_file.extractall( | ||||
|                             path=origin_dir, members=tar_file, filter="fully_trusted" | ||||
|                         ) | ||||
|                     _LOGGER.info("Restore folder %s done", name) | ||||
|                 except (tarfile.TarError, OSError) as err: | ||||
|                     _LOGGER.warning("Can't restore folder %s: %s", name, err) | ||||
|                     return False | ||||
|                 return True | ||||
|  | ||||
|             await self.sys_run_in_executor(_restore) | ||||
|             try: | ||||
|                 return await self.sys_run_in_executor(_restore) | ||||
|             finally: | ||||
|                 if bind_mounts: | ||||
|                     await asyncio.gather( | ||||
|                         *[bind_mount.mount() for bind_mount in bind_mounts] | ||||
|                     ) | ||||
|  | ||||
|         # Restore folder sequential | ||||
|         # avoid issue on slow IO | ||||
|         # Restore folders sequentially to avoid issues on slow IO | ||||
|         for folder in folder_list: | ||||
|             try: | ||||
|                 await _folder_restore(folder) | ||||
|                 await self._folder_restore(folder) | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 _LOGGER.warning("Can't restore folder %s: %s", folder, err) | ||||
|                 success = False | ||||
|         return success | ||||
|  | ||||
|     async def store_homeassistant(self): | ||||
|         """Backup Home Assitant Core configuration folder.""" | ||||
|         self._data[ATTR_HOMEASSISTANT] = {ATTR_VERSION: self.sys_homeassistant.version} | ||||
|     @Job(name="backup_store_homeassistant", cleanup=False) | ||||
|     async def store_homeassistant(self, exclude_database: bool = False): | ||||
|         """Backup Home Assistant Core configuration folder.""" | ||||
|         self._data[ATTR_HOMEASSISTANT] = { | ||||
|             ATTR_VERSION: self.sys_homeassistant.version, | ||||
|             ATTR_EXCLUDE_DATABASE: exclude_database, | ||||
|         } | ||||
|  | ||||
|         tar_name = f"homeassistant.tar{'.gz' if self.compressed else ''}" | ||||
|         # Backup Home Assistant Core config directory | ||||
|         tar_name = Path( | ||||
|             self._tmp.name, f"homeassistant.tar{'.gz' if self.compressed else ''}" | ||||
|         ) | ||||
|         homeassistant_file = SecureTarFile( | ||||
|             tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE | ||||
|         homeassistant_file = self._outer_secure_tarfile.create_inner_tar( | ||||
|             f"./{tar_name}", | ||||
|             gzip=self.compressed, | ||||
|             key=self._key, | ||||
|         ) | ||||
|  | ||||
|         await self.sys_homeassistant.backup(homeassistant_file) | ||||
|         await self.sys_homeassistant.backup(homeassistant_file, exclude_database) | ||||
|  | ||||
|         # Store size | ||||
|         self.homeassistant[ATTR_SIZE] = homeassistant_file.size | ||||
|  | ||||
|     @Job(name="backup_restore_homeassistant", cleanup=False) | ||||
|     async def restore_homeassistant(self) -> Awaitable[None]: | ||||
|         """Restore Home Assitant Core configuration folder.""" | ||||
|         """Restore Home Assistant Core configuration folder.""" | ||||
|         await self.sys_homeassistant.core.stop() | ||||
|  | ||||
|         # Restore Home Assistant Core config directory | ||||
| @@ -549,7 +734,9 @@ class Backup(CoreSysAttributes): | ||||
|             tar_name, "r", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE | ||||
|         ) | ||||
|  | ||||
|         await self.sys_homeassistant.restore(homeassistant_file) | ||||
|         await self.sys_homeassistant.restore( | ||||
|             homeassistant_file, self.homeassistant_exclude_database | ||||
|         ) | ||||
|  | ||||
|         # Generate restore task | ||||
|         async def _core_update(): | ||||
| @@ -568,16 +755,16 @@ class Backup(CoreSysAttributes): | ||||
|  | ||||
|         return self.sys_create_task(_core_update()) | ||||
|  | ||||
|     def store_repositories(self): | ||||
|     def store_repositories(self) -> None: | ||||
|         """Store repository list into backup.""" | ||||
|         self.repositories = self.sys_store.repository_urls | ||||
|  | ||||
|     async def restore_repositories(self, replace: bool = False): | ||||
|     def restore_repositories(self, replace: bool = False) -> Awaitable[None]: | ||||
|         """Restore repositories from backup. | ||||
|  | ||||
|         Return a coroutine. | ||||
|         """ | ||||
|         await self.sys_store.update_repositories( | ||||
|         return self.sys_store.update_repositories( | ||||
|             self.repositories, add_with_errors=True, replace=replace | ||||
|         ) | ||||
|  | ||||
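# --- Illustrative aside (editor's sketch) ---
# restore_repositories changed from "async def + await" to a plain method
# returning the coroutine: callers may now schedule or await it as they see
# fit, and no extra coroutine frame is created. In miniature:
import asyncio
from collections.abc import Awaitable

async def _update_repositories() -> None:
    await asyncio.sleep(0)  # stands in for the real store update

def restore_repositories() -> Awaitable[None]:
    """Return a coroutine; the caller decides when to await it."""
    return _update_repositories()

asyncio.run(restore_repositories())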
|   | ||||
| @@ -1,11 +1,39 @@ | ||||
| """Backup consts.""" | ||||
| from enum import Enum | ||||
|  | ||||
| from enum import StrEnum | ||||
|  | ||||
| BUF_SIZE = 2**20 * 4  # 4MB | ||||
| DEFAULT_FREEZE_TIMEOUT = 600 | ||||
|  | ||||
|  | ||||
| class BackupType(str, Enum): | ||||
| class BackupType(StrEnum): | ||||
|     """Backup type enum.""" | ||||
|  | ||||
|     FULL = "full" | ||||
|     PARTIAL = "partial" | ||||
|  | ||||
|  | ||||
| class BackupJobStage(StrEnum): | ||||
|     """Backup job stage enum.""" | ||||
|  | ||||
|     ADDON_REPOSITORIES = "addon_repositories" | ||||
|     ADDONS = "addons" | ||||
|     DOCKER_CONFIG = "docker_config" | ||||
|     FINISHING_FILE = "finishing_file" | ||||
|     FOLDERS = "folders" | ||||
|     HOME_ASSISTANT = "home_assistant" | ||||
|     AWAIT_ADDON_RESTARTS = "await_addon_restarts" | ||||
|  | ||||
|  | ||||
| class RestoreJobStage(StrEnum): | ||||
|     """Restore job stage enum.""" | ||||
|  | ||||
|     ADDON_REPOSITORIES = "addon_repositories" | ||||
|     ADDONS = "addons" | ||||
|     AWAIT_ADDON_RESTARTS = "await_addon_restarts" | ||||
|     AWAIT_HOME_ASSISTANT_RESTART = "await_home_assistant_restart" | ||||
|     CHECK_HOME_ASSISTANT = "check_home_assistant" | ||||
|     DOCKER_CONFIG = "docker_config" | ||||
|     FOLDERS = "folders" | ||||
|     HOME_ASSISTANT = "home_assistant" | ||||
|     REMOVE_DELTA_ADDONS = "remove_delta_addons" | ||||
|   | ||||
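# --- Illustrative aside (editor's sketch) ---
# The switch from "class BackupType(str, Enum)" to StrEnum (Python 3.11+)
# matters mostly for formatting: StrEnum members format as their plain
# value, which keeps log lines and JSON payloads clean.
from enum import StrEnum

class Demo(StrEnum):
    FULL = "full"
    PARTIAL = "partial"

assert Demo.FULL == "full"             # also true for the old mixin form
assert f"{Demo.PARTIAL}" == "partial"  # on 3.11, (str, Enum) formats as "Demo.PARTIAL"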
| @@ -1,56 +1,59 @@ | ||||
| """Backup manager.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable, Iterable | ||||
| import errno | ||||
| import logging | ||||
| from pathlib import Path | ||||
|  | ||||
| from ..addons.addon import Addon | ||||
| from ..const import ( | ||||
|     ATTR_DATA, | ||||
|     ATTR_DAYS_UNTIL_STALE, | ||||
|     ATTR_SLUG, | ||||
|     ATTR_TYPE, | ||||
|     FILE_HASSIO_BACKUPS, | ||||
|     FOLDER_HOMEASSISTANT, | ||||
|     CoreState, | ||||
| ) | ||||
| from ..coresys import CoreSysAttributes | ||||
| from ..dbus.const import UnitActiveState | ||||
| from ..exceptions import AddonsError | ||||
| from ..jobs.decorator import Job, JobCondition | ||||
| from ..exceptions import ( | ||||
|     BackupError, | ||||
|     BackupInvalidError, | ||||
|     BackupJobError, | ||||
|     BackupMountDownError, | ||||
|     HomeAssistantWSError, | ||||
| ) | ||||
| from ..homeassistant.const import WSType | ||||
| from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit | ||||
| from ..jobs.decorator import Job | ||||
| from ..jobs.job_group import JobGroup | ||||
| from ..mounts.mount import Mount | ||||
| from ..resolution.const import UnhealthyReason | ||||
| from ..utils.common import FileConfiguration | ||||
| from ..utils.dt import utcnow | ||||
| from ..utils.sentinel import DEFAULT | ||||
| from ..utils.sentry import capture_exception | ||||
| from .backup import Backup | ||||
| from .const import BackupType | ||||
| from .const import DEFAULT_FREEZE_TIMEOUT, BackupJobStage, BackupType, RestoreJobStage | ||||
| from .utils import create_slug | ||||
| from .validate import ALL_FOLDERS, SCHEMA_BACKUPS_CONFIG | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| def _list_backup_files(path: Path) -> Iterable[Path]: | ||||
|     """Return iterable of backup files, suppress and log OSError for network mounts.""" | ||||
|     try: | ||||
|         # is_dir does a stat syscall which raises if the mount is down | ||||
|         if path.is_dir(): | ||||
|             return path.glob("*.tar") | ||||
|     except OSError as err: | ||||
|         _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err) | ||||
|  | ||||
|     return [] | ||||
|  | ||||
|  | ||||
| class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
| class BackupManager(FileConfiguration, JobGroup): | ||||
|     """Manage backups.""" | ||||
|  | ||||
|     def __init__(self, coresys): | ||||
|         """Initialize a backup manager.""" | ||||
|         super().__init__(FILE_HASSIO_BACKUPS, SCHEMA_BACKUPS_CONFIG) | ||||
|         self.coresys = coresys | ||||
|         self._backups = {} | ||||
|         self.lock = asyncio.Lock() | ||||
|         super(FileConfiguration, self).__init__(coresys, JOB_GROUP_BACKUP_MANAGER) | ||||
|         self._backups: dict[str, Backup] = {} | ||||
|         self._thaw_task: Awaitable[None] | None = None | ||||
|         self._thaw_event: asyncio.Event = asyncio.Event() | ||||
|  | ||||
|     @property | ||||
|     def list_backups(self) -> set[Backup]: | ||||
| @@ -76,20 +79,64 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|             if mount.state == UnitActiveState.ACTIVE | ||||
|         ] | ||||
|  | ||||
|     def get(self, slug): | ||||
|     def get(self, slug: str) -> Backup: | ||||
|         """Return backup object.""" | ||||
|         return self._backups.get(slug) | ||||
|  | ||||
|     def _get_base_path(self, location: Mount | type[DEFAULT] | None = DEFAULT) -> Path: | ||||
|         """Get base path for backup using location or default location.""" | ||||
|         if location == DEFAULT and self.sys_mounts.default_backup_mount: | ||||
|             location = self.sys_mounts.default_backup_mount | ||||
|  | ||||
|         if location: | ||||
|             if not location.local_where.is_mount(): | ||||
|                 raise BackupMountDownError( | ||||
|                     f"{location.name} is down, cannot back-up to it", _LOGGER.error | ||||
|                 ) | ||||
|             return location.local_where | ||||
|  | ||||
|         if location == DEFAULT and self.sys_mounts.default_backup_mount: | ||||
|             return self.sys_mounts.default_backup_mount.local_where | ||||
|  | ||||
|         return self.sys_config.path_backup | ||||
|  | ||||
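# --- Illustrative aside (editor's sketch) ---
# The resolution order implemented above: explicit location, then the
# configured default backup mount, then the local backup directory, with a
# hard failure whenever the chosen mount is not actually mounted.
# RuntimeError stands in for Supervisor's BackupMountDownError.
from pathlib import Path

def pick_backup_dir(explicit: Path | None, default_mount: Path | None,
                    local: Path) -> Path:
    target = explicit if explicit is not None else default_mount
    if target is not None:
        if not target.is_mount():
            raise RuntimeError(f"{target} is down, cannot back up to it")
        return target
    return local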
|     def _change_stage( | ||||
|         self, | ||||
|         stage: BackupJobStage | RestoreJobStage, | ||||
|         backup: Backup | None = None, | ||||
|     ): | ||||
|         """Change the stage of the current job during backup/restore. | ||||
|  | ||||
|         Must be called from an existing backup/restore job. | ||||
|         """ | ||||
|         job_name = self.sys_jobs.current.name | ||||
|         if "restore" in job_name: | ||||
|             action = "Restore" | ||||
|         elif "freeze" in job_name: | ||||
|             action = "Freeze" | ||||
|         elif "thaw" in job_name: | ||||
|             action = "Thaw" | ||||
|         else: | ||||
|             action = "Backup" | ||||
|  | ||||
|         _LOGGER.info( | ||||
|             "%s %sstarting stage %s", | ||||
|             action, | ||||
|             f"{backup.slug} " if backup else "", | ||||
|             stage, | ||||
|         ) | ||||
|         self.sys_jobs.current.stage = stage | ||||
|  | ||||
|     def _list_backup_files(self, path: Path) -> Iterable[Path]: | ||||
|         """Return iterable of backup files, suppress and log OSError for network mounts.""" | ||||
|         try: | ||||
|             # is_dir does a stat syscall which raises if the mount is down | ||||
|             if path.is_dir(): | ||||
|                 return path.glob("*.tar") | ||||
|         except OSError as err: | ||||
|             if err.errno == errno.EBADMSG and path == self.sys_config.path_backup: | ||||
|                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||
|             _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err) | ||||
|  | ||||
|         return [] | ||||
|  | ||||
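# --- Illustrative aside (editor's sketch) ---
# The errno check above: EBADMSG raised while scanning the *local* backup
# directory is taken as a sign of filesystem corruption, so the system is
# flagged unhealthy rather than silently reporting zero backups.
import errno
from pathlib import Path

def list_tars(path: Path, local_backup_dir: Path) -> list[Path]:
    try:
        if path.is_dir():  # stat syscall; raises if a mount is down
            return list(path.glob("*.tar"))
    except OSError as err:
        if err.errno == errno.EBADMSG and path == local_backup_dir:
            print("flagging system unhealthy: OSERROR_BAD_MESSAGE")
        print(f"Could not list backups from {path}: {err}")
    return []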
|     def _create_backup( | ||||
|         self, | ||||
|         name: str, | ||||
| @@ -98,48 +145,58 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|         compressed: bool = True, | ||||
|         location: Mount | type[DEFAULT] | None = DEFAULT, | ||||
|     ) -> Backup: | ||||
|         """Initialize a new backup object from name.""" | ||||
|         """Initialize a new backup object from name. | ||||
|  | ||||
|         Must be called from an existing backup job. | ||||
|         """ | ||||
|         date_str = utcnow().isoformat() | ||||
|         slug = create_slug(name, date_str) | ||||
|         tar_file = Path(self._get_base_path(location), f"{slug}.tar") | ||||
|  | ||||
|         # init object | ||||
|         backup = Backup(self.coresys, tar_file) | ||||
|         backup.new(slug, name, date_str, sys_type, password, compressed) | ||||
|         backup = Backup(self.coresys, tar_file, slug) | ||||
|         backup.new(name, date_str, sys_type, password, compressed) | ||||
|  | ||||
|         # Add backup ID to job | ||||
|         self.sys_jobs.current.reference = backup.slug | ||||
|  | ||||
|         self._change_stage(BackupJobStage.ADDON_REPOSITORIES, backup) | ||||
|         backup.store_repositories() | ||||
|         self._change_stage(BackupJobStage.DOCKER_CONFIG, backup) | ||||
|         backup.store_dockerconfig() | ||||
|  | ||||
|         return backup | ||||
|  | ||||
|     def load(self): | ||||
|     def load(self) -> Awaitable[None]: | ||||
|         """Load exists backups data. | ||||
|  | ||||
|         Return a coroutine. | ||||
|         """ | ||||
|         return self.reload() | ||||
|  | ||||
|     async def reload(self): | ||||
|     async def reload(self) -> None: | ||||
|         """Load exists backups.""" | ||||
|         self._backups = {} | ||||
|  | ||||
|         async def _load_backup(tar_file): | ||||
|             """Load the backup.""" | ||||
|             backup = Backup(self.coresys, tar_file) | ||||
|             backup = Backup(self.coresys, tar_file, "temp") | ||||
|             if await backup.load(): | ||||
|                 self._backups[backup.slug] = backup | ||||
|                 self._backups[backup.slug] = Backup( | ||||
|                     self.coresys, tar_file, backup.slug, backup.data | ||||
|                 ) | ||||
|  | ||||
|         tasks = [ | ||||
|             self.sys_create_task(_load_backup(tar_file)) | ||||
|             for path in self.backup_locations | ||||
|             for tar_file in _list_backup_files(path) | ||||
|             for tar_file in self._list_backup_files(path) | ||||
|         ] | ||||
|  | ||||
|         _LOGGER.info("Found %d backup files", len(tasks)) | ||||
|         if tasks: | ||||
|             await asyncio.wait(tasks) | ||||
|  | ||||
|     def remove(self, backup): | ||||
|     def remove(self, backup: Backup) -> bool: | ||||
|         """Remove a backup.""" | ||||
|         try: | ||||
|             backup.tarfile.unlink() | ||||
| @@ -147,14 +204,19 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|             _LOGGER.info("Removed backup file %s", backup.slug) | ||||
|  | ||||
|         except OSError as err: | ||||
|             if ( | ||||
|                 err.errno == errno.EBADMSG | ||||
|                 and backup.tarfile.parent == self.sys_config.path_backup | ||||
|             ): | ||||
|                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||
|             _LOGGER.error("Can't remove backup %s: %s", backup.slug, err) | ||||
|             return False | ||||
|  | ||||
|         return True | ||||
|  | ||||
|     async def import_backup(self, tar_file): | ||||
|     async def import_backup(self, tar_file: Path) -> Backup | None: | ||||
|         """Check backup tarfile and import it.""" | ||||
|         backup = Backup(self.coresys, tar_file) | ||||
|         backup = Backup(self.coresys, tar_file, "temp") | ||||
|  | ||||
|         # Read meta data | ||||
|         if not await backup.load(): | ||||
| @@ -171,11 +233,13 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|             backup.tarfile.rename(tar_origin) | ||||
|  | ||||
|         except OSError as err: | ||||
|             if err.errno == errno.EBADMSG: | ||||
|                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||
|             _LOGGER.error("Can't move backup file to storage: %s", err) | ||||
|             return None | ||||
|  | ||||
|         # Load new backup | ||||
|         backup = Backup(self.coresys, tar_origin) | ||||
|         backup = Backup(self.coresys, tar_origin, backup.slug, backup.data) | ||||
|         if not await backup.load(): | ||||
|             return None | ||||
|         _LOGGER.info("Successfully imported %s", backup.slug) | ||||
| @@ -189,68 +253,117 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|         addon_list: list[Addon], | ||||
|         folder_list: list[str], | ||||
|         homeassistant: bool, | ||||
|     ): | ||||
|         homeassistant_exclude_database: bool | None, | ||||
|     ) -> Backup | None: | ||||
|         """Create a backup. | ||||
|  | ||||
|         Must be called from an existing backup job. | ||||
|         """ | ||||
|         addon_start_tasks: list[Awaitable[None]] | None = None | ||||
|  | ||||
|         try: | ||||
|             self.sys_core.state = CoreState.FREEZE | ||||
|  | ||||
|             async with backup: | ||||
|                 # Backup add-ons | ||||
|                 if addon_list: | ||||
|                     _LOGGER.info("Backing up %s store Add-ons", backup.slug) | ||||
|                     addon_start_tasks = await backup.store_addons(addon_list) | ||||
|  | ||||
|                 # HomeAssistant Folder is for v1 | ||||
|                 if homeassistant: | ||||
|                     await backup.store_homeassistant() | ||||
|                     self._change_stage(BackupJobStage.HOME_ASSISTANT, backup) | ||||
|                     await backup.store_homeassistant( | ||||
|                         self.sys_homeassistant.backups_exclude_database | ||||
|                         if homeassistant_exclude_database is None | ||||
|                         else homeassistant_exclude_database | ||||
|                     ) | ||||
|  | ||||
|                 # Backup add-ons | ||||
|                 if addon_list: | ||||
|                     self._change_stage(BackupJobStage.ADDONS, backup) | ||||
|                     addon_start_tasks = await backup.store_addons(addon_list) | ||||
|  | ||||
|                 # Backup folders | ||||
|                 if folder_list: | ||||
|                     _LOGGER.info("Backing up %s store folders", backup.slug) | ||||
|                     self._change_stage(BackupJobStage.FOLDERS, backup) | ||||
|                     await backup.store_folders(folder_list) | ||||
|  | ||||
|                 self._change_stage(BackupJobStage.FINISHING_FILE, backup) | ||||
|  | ||||
|         except BackupError as err: | ||||
|             self.sys_jobs.current.capture_error(err) | ||||
|             return None | ||||
|         except Exception as err:  # pylint: disable=broad-except | ||||
|             _LOGGER.exception("Backup %s error", backup.slug) | ||||
|             capture_exception(err) | ||||
|             self.sys_jobs.current.capture_error( | ||||
|                 BackupError(f"Backup {backup.slug} error, see supervisor logs") | ||||
|             ) | ||||
|             return None | ||||
|         else: | ||||
|             self._backups[backup.slug] = backup | ||||
|  | ||||
|             if addon_start_tasks: | ||||
|                 self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS, backup) | ||||
|                 # Ignore exceptions from waiting for addon startup; addon errors are handled elsewhere | ||||
|                 await asyncio.gather(*addon_start_tasks, return_exceptions=True) | ||||
|  | ||||
|             try: | ||||
|                 await self.sys_homeassistant.websocket.async_send_command( | ||||
|                     { | ||||
|                         ATTR_TYPE: WSType.BACKUP_SYNC, | ||||
|                         ATTR_DATA: { | ||||
|                             ATTR_SLUG: backup.slug, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ) | ||||
|             except HomeAssistantWSError as err: | ||||
|                 _LOGGER.error("Can't send backup sync to Home Assistant: %s", err) | ||||
|  | ||||
|             return backup | ||||
|         finally: | ||||
|             self.sys_core.state = CoreState.RUNNING | ||||
|  | ||||
|     @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING]) | ||||
|     @Job( | ||||
|         name="backup_manager_full_backup", | ||||
|         conditions=[JobCondition.RUNNING], | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=BackupJobError, | ||||
|         cleanup=False, | ||||
|     ) | ||||
|     async def do_backup_full( | ||||
|         self, | ||||
|         name="", | ||||
|         password=None, | ||||
|         compressed=True, | ||||
|         name: str = "", | ||||
|         password: str | None = None, | ||||
|         compressed: bool = True, | ||||
|         location: Mount | type[DEFAULT] | None = DEFAULT, | ||||
|     ): | ||||
|         homeassistant_exclude_database: bool | None = None, | ||||
|     ) -> Backup | None: | ||||
|         """Create a full backup.""" | ||||
|         if self.lock.locked(): | ||||
|             _LOGGER.error("A backup/restore process is already running") | ||||
|             return None | ||||
|         if self._get_base_path(location) == self.sys_config.path_backup: | ||||
|             await Job.check_conditions( | ||||
|                 self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_full" | ||||
|             ) | ||||
|  | ||||
|         backup = self._create_backup( | ||||
|             name, BackupType.FULL, password, compressed, location | ||||
|         ) | ||||
|  | ||||
|         _LOGGER.info("Creating new full backup with slug %s", backup.slug) | ||||
|         async with self.lock: | ||||
|             backup = await self._do_backup( | ||||
|                 backup, self.sys_addons.installed, ALL_FOLDERS, True | ||||
|             ) | ||||
|             if backup: | ||||
|                 _LOGGER.info("Creating full backup with slug %s completed", backup.slug) | ||||
|             return backup | ||||
|         backup = await self._do_backup( | ||||
|             backup, | ||||
|             self.sys_addons.installed, | ||||
|             ALL_FOLDERS, | ||||
|             True, | ||||
|             homeassistant_exclude_database, | ||||
|         ) | ||||
|         if backup: | ||||
|             _LOGGER.info("Creating full backup with slug %s completed", backup.slug) | ||||
|         return backup | ||||
|  | ||||
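# --- Illustrative aside (editor's sketch) ---
# The manual Job.check_conditions call above gates on free space only when
# the backup lands on the local data disk; a mounted target has its own
# capacity. A self-contained approximation (min_free_mb is a made-up
# threshold, not Supervisor's actual value):
from pathlib import Path
import shutil

def ensure_local_free_space(target: Path, local_backup_dir: Path,
                            min_free_mb: int = 1024) -> None:
    if target != local_backup_dir:
        return  # backing up to a mount: skip the local free-space gate
    free_mb = shutil.disk_usage(target).free / 1024**2
    if free_mb < min_free_mb:
        raise RuntimeError(f"Not enough free space: {free_mb:.0f} MB left")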
|     @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING]) | ||||
|     @Job( | ||||
|         name="backup_manager_partial_backup", | ||||
|         conditions=[JobCondition.RUNNING], | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=BackupJobError, | ||||
|         cleanup=False, | ||||
|     ) | ||||
|     async def do_backup_partial( | ||||
|         self, | ||||
|         name: str = "", | ||||
| @@ -260,11 +373,13 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|         homeassistant: bool = False, | ||||
|         compressed: bool = True, | ||||
|         location: Mount | type[DEFAULT] | None = DEFAULT, | ||||
|     ): | ||||
|         homeassistant_exclude_database: bool | None = None, | ||||
|     ) -> Backup | None: | ||||
|         """Create a partial backup.""" | ||||
|         if self.lock.locked(): | ||||
|             _LOGGER.error("A backup/restore process is already running") | ||||
|             return None | ||||
|         if self._get_base_path(location) == self.sys_config.path_backup: | ||||
|             await Job.check_conditions( | ||||
|                 self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_partial" | ||||
|             ) | ||||
|  | ||||
|         addons = addons or [] | ||||
|         folders = folders or [] | ||||
| @@ -282,21 +397,20 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|         ) | ||||
|  | ||||
|         _LOGGER.info("Creating new partial backup with slug %s", backup.slug) | ||||
|         async with self.lock: | ||||
|             addon_list = [] | ||||
|             for addon_slug in addons: | ||||
|                 addon = self.sys_addons.get(addon_slug) | ||||
|                 if addon and addon.is_installed: | ||||
|                     addon_list.append(addon) | ||||
|                     continue | ||||
|                 _LOGGER.warning("Add-on %s not found/installed", addon_slug) | ||||
|         addon_list = [] | ||||
|         for addon_slug in addons: | ||||
|             addon = self.sys_addons.get(addon_slug) | ||||
|             if addon and addon.is_installed: | ||||
|                 addon_list.append(addon) | ||||
|                 continue | ||||
|             _LOGGER.warning("Add-on %s not found/installed", addon_slug) | ||||
|  | ||||
|             backup = await self._do_backup(backup, addon_list, folders, homeassistant) | ||||
|             if backup: | ||||
|                 _LOGGER.info( | ||||
|                     "Creating partial backup with slug %s completed", backup.slug | ||||
|                 ) | ||||
|             return backup | ||||
|         backup = await self._do_backup( | ||||
|             backup, addon_list, folders, homeassistant, homeassistant_exclude_database | ||||
|         ) | ||||
|         if backup: | ||||
|             _LOGGER.info("Creating partial backup with slug %s completed", backup.slug) | ||||
|         return backup | ||||
|  | ||||
|     async def _do_restore( | ||||
|         self, | ||||
| @@ -305,126 +419,154 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|         folder_list: list[str], | ||||
|         homeassistant: bool, | ||||
|         replace: bool, | ||||
|     ): | ||||
|     ) -> bool: | ||||
|         """Restore from a backup. | ||||
|  | ||||
|         Must be called from an existing restore job. | ||||
|         """ | ||||
|         addon_start_tasks: list[Awaitable[None]] | None = None | ||||
|         success = True | ||||
|  | ||||
|         try: | ||||
|             task_hass: asyncio.Task | None = None | ||||
|             async with backup: | ||||
|                 # Restore docker config | ||||
|                 _LOGGER.info("Restoring %s Docker config", backup.slug) | ||||
|                 self._change_stage(RestoreJobStage.DOCKER_CONFIG, backup) | ||||
|                 backup.restore_dockerconfig(replace) | ||||
|  | ||||
|                 # Process folders | ||||
|                 if folder_list: | ||||
|                     _LOGGER.info("Restoring %s folders", backup.slug) | ||||
|                     await backup.restore_folders(folder_list) | ||||
|                     self._change_stage(RestoreJobStage.FOLDERS, backup) | ||||
|                     success = await backup.restore_folders(folder_list) | ||||
|  | ||||
|                 # Process Home-Assistant | ||||
|                 if homeassistant: | ||||
|                     _LOGGER.info("Restoring %s Home Assistant Core", backup.slug) | ||||
|                     self._change_stage(RestoreJobStage.HOME_ASSISTANT, backup) | ||||
|                     task_hass = await backup.restore_homeassistant() | ||||
|  | ||||
|                 # Delete delta add-ons | ||||
|                 if replace: | ||||
|                     _LOGGER.info("Removing Add-ons not in the backup %s", backup.slug) | ||||
|                     for addon in self.sys_addons.installed: | ||||
|                         if addon.slug in backup.addon_list: | ||||
|                             continue | ||||
|  | ||||
|                         # Remove Add-on because it's not a part of the new env | ||||
|                         # Do it sequential avoid issue on slow IO | ||||
|                         try: | ||||
|                             await addon.uninstall() | ||||
|                         except AddonsError: | ||||
|                             _LOGGER.warning("Can't uninstall Add-on %s", addon.slug) | ||||
|                     self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup) | ||||
|                     success = success and await backup.remove_delta_addons() | ||||
|  | ||||
|                 if addon_list: | ||||
|                     _LOGGER.info("Restoring %s Repositories", backup.slug) | ||||
|                     self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup) | ||||
|                     await backup.restore_repositories(replace) | ||||
|  | ||||
|                     _LOGGER.info("Restoring %s Add-ons", backup.slug) | ||||
|                     addon_start_tasks = await backup.restore_addons(addon_list) | ||||
|                     self._change_stage(RestoreJobStage.ADDONS, backup) | ||||
|                     restore_success, addon_start_tasks = await backup.restore_addons( | ||||
|                         addon_list | ||||
|                     ) | ||||
|                     success = success and restore_success | ||||
|  | ||||
|                 # Wait for Home Assistant Core update/downgrade | ||||
|                 if task_hass: | ||||
|                     _LOGGER.info("Restore %s wait for Home-Assistant", backup.slug) | ||||
|                     self._change_stage( | ||||
|                         RestoreJobStage.AWAIT_HOME_ASSISTANT_RESTART, backup | ||||
|                     ) | ||||
|                     await task_hass | ||||
|  | ||||
|         except BackupError: | ||||
|             raise | ||||
|         except Exception as err:  # pylint: disable=broad-except | ||||
|             _LOGGER.exception("Restore %s error", backup.slug) | ||||
|             capture_exception(err) | ||||
|             return False | ||||
|             raise BackupError( | ||||
|                 f"Restore {backup.slug} error, see supervisor logs" | ||||
|             ) from err | ||||
|         else: | ||||
|             if addon_start_tasks: | ||||
|                 # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere | ||||
|                 await asyncio.gather(*addon_start_tasks, return_exceptions=True) | ||||
|                 self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup) | ||||
|                 # Failure to resume addons post restore is still a restore failure | ||||
|                 if any( | ||||
|                     await asyncio.gather(*addon_start_tasks, return_exceptions=True) | ||||
|                 ): | ||||
|                     return False | ||||
|  | ||||
|             return True | ||||
|             return success | ||||
|         finally: | ||||
|             # Do we need start Home Assistant Core? | ||||
|             if not await self.sys_homeassistant.core.is_running(): | ||||
|                 await self.sys_homeassistant.core.start() | ||||
|             # Leave Home Assistant alone if it wasn't part of the restore | ||||
|             if homeassistant: | ||||
|                 self._change_stage(RestoreJobStage.CHECK_HOME_ASSISTANT, backup) | ||||
|  | ||||
|             # Check If we can access to API / otherwise restart | ||||
|             if not await self.sys_homeassistant.api.check_api_state(): | ||||
|                 _LOGGER.warning("Need restart HomeAssistant for API") | ||||
|                 await self.sys_homeassistant.core.restart() | ||||
|                 # Do we need to start Home Assistant Core? | ||||
|                 if not await self.sys_homeassistant.core.is_running(): | ||||
|                     await self.sys_homeassistant.core.start( | ||||
|                         _job_override__cleanup=False | ||||
|                     ) | ||||
|  | ||||
|                 # Check if we can access the API, otherwise restart | ||||
|                 if not await self.sys_homeassistant.api.check_api_state(): | ||||
|                     _LOGGER.warning("Need restart HomeAssistant for API") | ||||
|                     await self.sys_homeassistant.core.restart( | ||||
|                         _job_override__cleanup=False | ||||
|                     ) | ||||
|  | ||||
|     @Job( | ||||
|         name="backup_manager_full_restore", | ||||
|         conditions=[ | ||||
|             JobCondition.FREE_SPACE, | ||||
|             JobCondition.HEALTHY, | ||||
|             JobCondition.INTERNET_HOST, | ||||
|             JobCondition.INTERNET_SYSTEM, | ||||
|             JobCondition.RUNNING, | ||||
|         ] | ||||
|         ], | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=BackupJobError, | ||||
|         cleanup=False, | ||||
|     ) | ||||
|     async def do_restore_full(self, backup: Backup, password=None): | ||||
|     async def do_restore_full( | ||||
|         self, backup: Backup, password: str | None = None | ||||
|     ) -> bool: | ||||
|         """Restore a backup.""" | ||||
|         if self.lock.locked(): | ||||
|             _LOGGER.error("A backup/restore process is already running") | ||||
|             return False | ||||
|         # Add backup ID to job | ||||
|         self.sys_jobs.current.reference = backup.slug | ||||
|  | ||||
|         if backup.sys_type != BackupType.FULL: | ||||
|             _LOGGER.error("%s is only a partial backup!", backup.slug) | ||||
|             return False | ||||
|             raise BackupInvalidError( | ||||
|                 f"{backup.slug} is only a partial backup!", _LOGGER.error | ||||
|             ) | ||||
|  | ||||
|         if backup.protected and not backup.set_password(password): | ||||
|             _LOGGER.error("Invalid password for backup %s", backup.slug) | ||||
|             return False | ||||
|             raise BackupInvalidError( | ||||
|                 f"Invalid password for backup {backup.slug}", _LOGGER.error | ||||
|             ) | ||||
|  | ||||
|         if backup.supervisor_version > self.sys_supervisor.version: | ||||
|             _LOGGER.error( | ||||
|                 "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.", | ||||
|                 backup.supervisor_version, | ||||
|                 self.sys_supervisor.version, | ||||
|             raise BackupInvalidError( | ||||
|                 f"Backup was made on supervisor version {backup.supervisor_version}, " | ||||
|                 f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.", | ||||
|                 _LOGGER.error, | ||||
|             ) | ||||
|             return False | ||||
|  | ||||
|         _LOGGER.info("Full-Restore %s start", backup.slug) | ||||
|         async with self.lock: | ||||
|             self.sys_core.state = CoreState.FREEZE | ||||
|         self.sys_core.state = CoreState.FREEZE | ||||
|  | ||||
|         try: | ||||
|             # Stop Home-Assistant / Add-ons | ||||
|             await self.sys_core.shutdown() | ||||
|  | ||||
|             success = await self._do_restore( | ||||
|                 backup, backup.addon_list, backup.folders, True, True | ||||
|             ) | ||||
|  | ||||
|         finally: | ||||
|             self.sys_core.state = CoreState.RUNNING | ||||
|  | ||||
|             if success: | ||||
|                 _LOGGER.info("Full-Restore %s done", backup.slug) | ||||
|         if success: | ||||
|             _LOGGER.info("Full-Restore %s done", backup.slug) | ||||
|         return success | ||||
|  | ||||
|     @Job( | ||||
|         name="backup_manager_partial_restore", | ||||
|         conditions=[ | ||||
|             JobCondition.FREE_SPACE, | ||||
|             JobCondition.HEALTHY, | ||||
|             JobCondition.INTERNET_HOST, | ||||
|             JobCondition.INTERNET_SYSTEM, | ||||
|             JobCondition.RUNNING, | ||||
|         ] | ||||
|         ], | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=BackupJobError, | ||||
|         cleanup=False, | ||||
|     ) | ||||
|     async def do_restore_partial( | ||||
|         self, | ||||
| @@ -433,11 +575,10 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|         addons: list[str] | None = None, | ||||
|         folders: list[Path] | None = None, | ||||
|         password: str | None = None, | ||||
|     ): | ||||
|     ) -> bool: | ||||
|         """Restore a backup.""" | ||||
|         if self.lock.locked(): | ||||
|             _LOGGER.error("A backup/restore process is already running") | ||||
|             return False | ||||
|         # Add backup ID to job | ||||
|         self.sys_jobs.current.reference = backup.slug | ||||
|  | ||||
|         addon_list = addons or [] | ||||
|         folder_list = folders or [] | ||||
| @@ -448,30 +589,118 @@ class BackupManager(FileConfiguration, CoreSysAttributes): | ||||
|             homeassistant = True | ||||
|  | ||||
|         if backup.protected and not backup.set_password(password): | ||||
|             _LOGGER.error("Invalid password for backup %s", backup.slug) | ||||
|             return False | ||||
|             raise BackupInvalidError( | ||||
|                 f"Invalid password for backup {backup.slug}", _LOGGER.error | ||||
|             ) | ||||
|  | ||||
|         if backup.homeassistant is None and homeassistant: | ||||
|             _LOGGER.error("No Home Assistant Core data inside the backup") | ||||
|             return False | ||||
|             raise BackupInvalidError( | ||||
|                 "No Home Assistant Core data inside the backup", _LOGGER.error | ||||
|             ) | ||||
|  | ||||
|         if backup.supervisor_version > self.sys_supervisor.version: | ||||
|             _LOGGER.error( | ||||
|                 "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.", | ||||
|                 backup.supervisor_version, | ||||
|                 self.sys_supervisor.version, | ||||
|             raise BackupInvalidError( | ||||
|                 f"Backup was made on supervisor version {backup.supervisor_version}, " | ||||
|                 f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.", | ||||
|                 _LOGGER.error, | ||||
|             ) | ||||
|             return False | ||||
|  | ||||
|         _LOGGER.info("Partial-Restore %s start", backup.slug) | ||||
|         async with self.lock: | ||||
|             self.sys_core.state = CoreState.FREEZE | ||||
|         self.sys_core.state = CoreState.FREEZE | ||||
|  | ||||
|         try: | ||||
|             success = await self._do_restore( | ||||
|                 backup, addon_list, folder_list, homeassistant, False | ||||
|             ) | ||||
|  | ||||
|         finally: | ||||
|             self.sys_core.state = CoreState.RUNNING | ||||
|  | ||||
|             if success: | ||||
|                 _LOGGER.info("Partial-Restore %s done", backup.slug) | ||||
|         if success: | ||||
|             _LOGGER.info("Partial-Restore %s done", backup.slug) | ||||
|         return success | ||||
|  | ||||
|     @Job( | ||||
|         name="backup_manager_freeze_all", | ||||
|         conditions=[JobCondition.RUNNING], | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=BackupJobError, | ||||
|     ) | ||||
|     async def freeze_all(self, timeout: float = DEFAULT_FREEZE_TIMEOUT) -> None: | ||||
|         """Freeze system to prepare for an external backup such as an image snapshot.""" | ||||
|         self.sys_core.state = CoreState.FREEZE | ||||
|  | ||||
|         # Determine running addons | ||||
|         installed = self.sys_addons.installed.copy() | ||||
|         is_running: list[bool] = await asyncio.gather( | ||||
|             *[addon.is_running() for addon in installed] | ||||
|         ) | ||||
|         running_addons = [ | ||||
|             installed[ind] for ind in range(len(installed)) if is_running[ind] | ||||
|         ] | ||||
|  | ||||
|         # Create thaw task first to ensure we eventually undo freezes even if the below fails | ||||
|         self._thaw_task = asyncio.shield( | ||||
|             self.sys_create_task(self._thaw_all(running_addons, timeout)) | ||||
|         ) | ||||
|  | ||||
|         # Tell Home Assistant to freeze for a backup | ||||
|         self._change_stage(BackupJobStage.HOME_ASSISTANT) | ||||
|         await self.sys_homeassistant.begin_backup() | ||||
|  | ||||
|         # Run all pre-backup tasks for addons | ||||
|         self._change_stage(BackupJobStage.ADDONS) | ||||
|         await asyncio.gather(*[addon.begin_backup() for addon in running_addons]) | ||||
|  | ||||
|     @Job( | ||||
|         name="backup_manager_thaw_all", | ||||
|         conditions=[JobCondition.FROZEN], | ||||
|         on_condition=BackupJobError, | ||||
|     ) | ||||
|     async def _thaw_all( | ||||
|         self, running_addons: list[Addon], timeout: float = DEFAULT_FREEZE_TIMEOUT | ||||
|     ) -> None: | ||||
|         """Thaw system after user signal or timeout.""" | ||||
|         try: | ||||
|             try: | ||||
|                 await asyncio.wait_for(self._thaw_event.wait(), timeout) | ||||
|             except TimeoutError: | ||||
|                 _LOGGER.warning( | ||||
|                     "Timeout waiting for signal to thaw after manual freeze, beginning thaw now" | ||||
|                 ) | ||||
|  | ||||
|             self._change_stage(BackupJobStage.HOME_ASSISTANT) | ||||
|             await self.sys_homeassistant.end_backup() | ||||
|  | ||||
|             self._change_stage(BackupJobStage.ADDONS) | ||||
|             addon_start_tasks: list[asyncio.Task] = [ | ||||
|                 task | ||||
|                 for task in await asyncio.gather( | ||||
|                     *[addon.end_backup() for addon in running_addons] | ||||
|                 ) | ||||
|                 if task | ||||
|             ] | ||||
|         finally: | ||||
|             self.sys_core.state = CoreState.RUNNING | ||||
|             self._thaw_event.clear() | ||||
|             self._thaw_task = None | ||||
|  | ||||
|         if addon_start_tasks: | ||||
|             self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS) | ||||
|             await asyncio.gather(*addon_start_tasks, return_exceptions=True) | ||||
|  | ||||
|     @Job( | ||||
|         name="backup_manager_signal_thaw", | ||||
|         conditions=[JobCondition.FROZEN], | ||||
|         limit=JobExecutionLimit.GROUP_ONCE, | ||||
|         on_condition=BackupJobError, | ||||
|         internal=True, | ||||
|     ) | ||||
|     async def thaw_all(self) -> None: | ||||
|         """Signal thaw task to begin unfreezing the system.""" | ||||
|         if not self._thaw_task: | ||||
|             raise BackupError( | ||||
|                 "Freeze was not initiated by freeze API, cannot thaw this way" | ||||
|             ) | ||||
|  | ||||
|         self._thaw_event.set() | ||||
|         await self._thaw_task | ||||
|   | ||||
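The reworked restore flow above threads a `success` flag through every stage and, in the `else` branch, treats any exception returned by `asyncio.gather(..., return_exceptions=True)` as a failed add-on restart. A minimal, self-contained sketch of that pattern (the stage coroutines are hypothetical stand-ins, not Supervisor APIs):

    import asyncio

    async def stage_ok() -> None:
        """A stage that completes normally (returns None, which is falsy)."""

    async def stage_fail() -> None:
        """A stage that raises, standing in for an add-on that fails to restart."""
        raise RuntimeError("addon did not come back up")

    async def run_stages() -> bool:
        success = True
        tasks = [asyncio.create_task(stage_ok()), asyncio.create_task(stage_fail())]
        # return_exceptions=True converts raised exceptions into list entries,
        # so any truthy result marks a failed stage without aborting the others.
        if any(await asyncio.gather(*tasks, return_exceptions=True)):
            success = False
        return success

    print(asyncio.run(run_stages()))  # False: one stage raised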
| @@ -1,4 +1,5 @@ | ||||
| """Util add-on functions.""" | ||||
|  | ||||
| import hashlib | ||||
| import re | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Validate some things around restore.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import Any | ||||
| @@ -14,6 +15,7 @@ from ..const import ( | ||||
|     ATTR_DATE, | ||||
|     ATTR_DAYS_UNTIL_STALE, | ||||
|     ATTR_DOCKER, | ||||
|     ATTR_EXCLUDE_DATABASE, | ||||
|     ATTR_FOLDERS, | ||||
|     ATTR_HOMEASSISTANT, | ||||
|     ATTR_NAME, | ||||
| @@ -52,7 +54,7 @@ def unique_addons(addons_list): | ||||
|  | ||||
|  | ||||
| def v1_homeassistant( | ||||
|     homeassistant_data: dict[str, Any] | None | ||||
|     homeassistant_data: dict[str, Any] | None, | ||||
| ) -> dict[str, Any] | None: | ||||
|     """Cleanup homeassistant artefacts from v1.""" | ||||
|     if not homeassistant_data: | ||||
| @@ -103,6 +105,9 @@ SCHEMA_BACKUP = vol.Schema( | ||||
|                     { | ||||
|                         vol.Required(ATTR_VERSION): version_tag, | ||||
|                         vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float), | ||||
|                         vol.Optional( | ||||
|                             ATTR_EXCLUDE_DATABASE, default=False | ||||
|                         ): vol.Boolean(), | ||||
|                     }, | ||||
|                     extra=vol.REMOVE_EXTRA, | ||||
|                 ) | ||||
|   | ||||
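The schema change above adds an `exclude_database` flag with a default of `False`, validated through voluptuous with `extra=vol.REMOVE_EXTRA`. A standalone sketch of just that sub-schema (simplified: `version` is a plain `str` here rather than the repo's `version_tag` validator):

    import voluptuous as vol

    schema = vol.Schema(
        {
            vol.Required("version"): str,
            vol.Optional("size", default=0): vol.Coerce(float),
            vol.Optional("exclude_database", default=False): vol.Boolean(),
        },
        extra=vol.REMOVE_EXTRA,
    )

    # Unknown keys are dropped and defaults are filled in, roughly:
    # {'version': '2023.8.0', 'size': 0.0, 'exclude_database': False}
    print(schema({"version": "2023.8.0", "junk": 1}))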
| @@ -1,4 +1,6 @@ | ||||
| """Bootstrap Supervisor.""" | ||||
|  | ||||
| # ruff: noqa: T100 | ||||
| import logging | ||||
| import os | ||||
| from pathlib import Path | ||||
| @@ -6,7 +8,7 @@ import signal | ||||
|  | ||||
| from colorlog import ColoredFormatter | ||||
|  | ||||
| from .addons import AddonManager | ||||
| from .addons.manager import AddonManager | ||||
| from .api import RestAPI | ||||
| from .arch import CpuArch | ||||
| from .auth import Auth | ||||
| @@ -115,7 +117,7 @@ async def initialize_coresys() -> CoreSys: | ||||
|         _LOGGER.warning( | ||||
|             "Missing SUPERVISOR_MACHINE environment variable. Fallback to deprecated extraction!" | ||||
|         ) | ||||
|     _LOGGER.info("Seting up coresys for machine: %s", coresys.machine) | ||||
|     _LOGGER.info("Setting up coresys for machine: %s", coresys.machine) | ||||
|  | ||||
|     return coresys | ||||
|  | ||||
| @@ -221,6 +223,14 @@ def initialize_system(coresys: CoreSys) -> None: | ||||
|         ) | ||||
|         config.path_emergency.mkdir() | ||||
|  | ||||
|     # Addon Configs folder | ||||
|     if not config.path_addon_configs.is_dir(): | ||||
|         _LOGGER.debug( | ||||
|             "Creating Supervisor add-on configs folder at '%s'", | ||||
|             config.path_addon_configs, | ||||
|         ) | ||||
|         config.path_addon_configs.mkdir() | ||||
|  | ||||
|  | ||||
| def migrate_system_env(coresys: CoreSys) -> None: | ||||
|     """Cleanup some stuff after update.""" | ||||
| @@ -248,9 +258,11 @@ def migrate_system_env(coresys: CoreSys) -> None: | ||||
| def initialize_logging() -> None: | ||||
|     """Initialize the logging.""" | ||||
|     logging.basicConfig(level=logging.INFO) | ||||
|     fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s" | ||||
|     fmt = ( | ||||
|         "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s" | ||||
|     ) | ||||
|     colorfmt = f"%(log_color)s{fmt}%(reset)s" | ||||
|     datefmt = "%y-%m-%d %H:%M:%S" | ||||
|     datefmt = "%Y-%m-%d %H:%M:%S" | ||||
|  | ||||
|     # suppress overly verbose logs from libraries that aren't helpful | ||||
|     logging.getLogger("aiohttp.access").setLevel(logging.WARNING) | ||||
|   | ||||
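The logging hunk above switches to a four-digit year and appends milliseconds via `%(msecs)03d`; the sub-second part has to live in the format string because `datefmt` has no millisecond directive. The same setup in isolation:

    import logging

    fmt = "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    logging.basicConfig(level=logging.INFO, format=fmt, datefmt="%Y-%m-%d %H:%M:%S")
    logging.getLogger("demo").info("hello")
    # e.g. 2023-08-15 12:34:56.789 INFO (MainThread) [demo] hello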
| @@ -1,4 +1,5 @@ | ||||
| """Bus event system.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Awaitable, Callable | ||||
|   | ||||
| @@ -1,5 +1,6 @@ | ||||
| """Bootstrap Supervisor.""" | ||||
| from datetime import datetime | ||||
|  | ||||
| from datetime import UTC, datetime | ||||
| import logging | ||||
| import os | ||||
| from pathlib import Path, PurePath | ||||
| @@ -48,8 +49,9 @@ MEDIA_DATA = PurePath("media") | ||||
| MOUNTS_FOLDER = PurePath("mounts") | ||||
| MOUNTS_CREDENTIALS = PurePath(".mounts_credentials") | ||||
| EMERGENCY_DATA = PurePath("emergency") | ||||
| ADDON_CONFIGS = PurePath("addon_configs") | ||||
|  | ||||
| DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat() | ||||
| DEFAULT_BOOT_TIME = datetime.fromtimestamp(0, UTC).isoformat() | ||||
|  | ||||
| # We filter out UTC because it's the system default fallback | ||||
| # Core also does not respect the container timezone and resets timezones | ||||
| @@ -153,7 +155,7 @@ class CoreConfig(FileConfiguration): | ||||
|  | ||||
|     def modify_log_level(self) -> None: | ||||
|         """Change log level.""" | ||||
|         lvl = getattr(logging, str(self.logging.value).upper()) | ||||
|         lvl = getattr(logging, self.logging.value.upper()) | ||||
|         logging.getLogger("supervisor").setLevel(lvl) | ||||
|  | ||||
|     @property | ||||
| @@ -163,7 +165,7 @@ class CoreConfig(FileConfiguration): | ||||
|  | ||||
|         boot_time = parse_datetime(boot_str) | ||||
|         if not boot_time: | ||||
|             return datetime.utcfromtimestamp(1) | ||||
|             return datetime.fromtimestamp(1, UTC) | ||||
|         return boot_time | ||||
|  | ||||
|     @last_boot.setter | ||||
| @@ -231,6 +233,16 @@ class CoreConfig(FileConfiguration): | ||||
|         """Return root add-on data folder external for Docker.""" | ||||
|         return PurePath(self.path_extern_supervisor, ADDONS_DATA) | ||||
|  | ||||
|     @property | ||||
|     def path_addon_configs(self) -> Path: | ||||
|         """Return root Add-on configs folder.""" | ||||
|         return self.path_supervisor / ADDON_CONFIGS | ||||
|  | ||||
|     @property | ||||
|     def path_extern_addon_configs(self) -> PurePath: | ||||
|         """Return root Add-on configs folder external for Docker.""" | ||||
|         return PurePath(self.path_extern_supervisor, ADDON_CONFIGS) | ||||
|  | ||||
|     @property | ||||
|     def path_audio(self) -> Path: | ||||
|         """Return root audio data folder.""" | ||||
|   | ||||
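As with the other folder properties, the internal `path_addon_configs` is a concrete `Path` (the Supervisor creates and checks it), while `path_extern_addon_configs` is a `PurePath`: it only names the host-side location handed to Docker and is never accessed directly. A small illustration of the distinction (the host path here is an assumption for the example):

    from pathlib import Path, PurePath

    SUPERVISOR_DATA = Path("/data")              # as mounted inside the container
    EXTERN_DATA = PurePath("/usr/share/hassio")  # assumed host-side data location
    ADDON_CONFIGS = PurePath("addon_configs")

    path_addon_configs = SUPERVISOR_DATA / ADDON_CONFIGS              # supports mkdir()/is_dir()
    path_extern_addon_configs = PurePath(EXTERN_DATA, ADDON_CONFIGS)  # a name only, no I/O methods

    print(path_addon_configs)         # /data/addon_configs
    print(path_extern_addon_configs)  # /usr/share/hassio/addon_configs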
| @@ -1,8 +1,11 @@ | ||||
| """Constants file for Supervisor.""" | ||||
| from enum import Enum | ||||
|  | ||||
| from dataclasses import dataclass | ||||
| from enum import StrEnum | ||||
| from ipaddress import ip_network | ||||
| from pathlib import Path | ||||
| from sys import version_info as systemversion | ||||
| from typing import Self | ||||
|  | ||||
| from aiohttp import __version__ as aiohttpversion | ||||
|  | ||||
| @@ -18,6 +21,7 @@ SUPERVISOR_DATA = Path("/data") | ||||
| FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json") | ||||
| FILE_HASSIO_AUTH = Path(SUPERVISOR_DATA, "auth.json") | ||||
| FILE_HASSIO_BACKUPS = Path(SUPERVISOR_DATA, "backups.json") | ||||
| FILE_HASSIO_BOARD = Path(SUPERVISOR_DATA, "board.json") | ||||
| FILE_HASSIO_CONFIG = Path(SUPERVISOR_DATA, "config.json") | ||||
| FILE_HASSIO_DISCOVERY = Path(SUPERVISOR_DATA, "discovery.json") | ||||
| FILE_HASSIO_DOCKER = Path(SUPERVISOR_DATA, "docker.json") | ||||
| @@ -65,10 +69,14 @@ META_SUPERVISOR = "supervisor" | ||||
| JSON_DATA = "data" | ||||
| JSON_MESSAGE = "message" | ||||
| JSON_RESULT = "result" | ||||
| JSON_JOB_ID = "job_id" | ||||
|  | ||||
| RESULT_ERROR = "error" | ||||
| RESULT_OK = "ok" | ||||
|  | ||||
| HEADER_REMOTE_USER_ID = "X-Remote-User-Id" | ||||
| HEADER_REMOTE_USER_NAME = "X-Remote-User-Name" | ||||
| HEADER_REMOTE_USER_DISPLAY_NAME = "X-Remote-User-Display-Name" | ||||
| HEADER_TOKEN_OLD = "X-Hassio-Key" | ||||
| HEADER_TOKEN = "X-Supervisor-Token" | ||||
|  | ||||
| @@ -84,6 +92,7 @@ REQUEST_FROM = "HASSIO_FROM" | ||||
| ATTR_ACCESS_TOKEN = "access_token" | ||||
| ATTR_ACCESSPOINTS = "accesspoints" | ||||
| ATTR_ACTIVE = "active" | ||||
| ATTR_ACTIVITY_LED = "activity_led" | ||||
| ATTR_ADDON = "addon" | ||||
| ATTR_ADDONS = "addons" | ||||
| ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list" | ||||
| @@ -109,6 +118,7 @@ ATTR_BACKUP_EXCLUDE = "backup_exclude" | ||||
| ATTR_BACKUP_POST = "backup_post" | ||||
| ATTR_BACKUP_PRE = "backup_pre" | ||||
| ATTR_BACKUPS = "backups" | ||||
| ATTR_BACKUPS_EXCLUDE_DATABASE = "backups_exclude_database" | ||||
| ATTR_BLK_READ = "blk_read" | ||||
| ATTR_BLK_WRITE = "blk_write" | ||||
| ATTR_BOARD = "board" | ||||
| @@ -148,9 +158,11 @@ ATTR_DIAGNOSTICS = "diagnostics" | ||||
| ATTR_DISCOVERY = "discovery" | ||||
| ATTR_DISK = "disk" | ||||
| ATTR_DISK_FREE = "disk_free" | ||||
| ATTR_DISK_LED = "disk_led" | ||||
| ATTR_DISK_LIFE_TIME = "disk_life_time" | ||||
| ATTR_DISK_TOTAL = "disk_total" | ||||
| ATTR_DISK_USED = "disk_used" | ||||
| ATTR_DISPLAYNAME = "displayname" | ||||
| ATTR_DNS = "dns" | ||||
| ATTR_DOCKER = "docker" | ||||
| ATTR_DOCKER_API = "docker_api" | ||||
| @@ -160,6 +172,7 @@ ATTR_ENABLE = "enable" | ||||
| ATTR_ENABLED = "enabled" | ||||
| ATTR_ENVIRONMENT = "environment" | ||||
| ATTR_EVENT = "event" | ||||
| ATTR_EXCLUDE_DATABASE = "exclude_database" | ||||
| ATTR_FEATURES = "features" | ||||
| ATTR_FILENAME = "filename" | ||||
| ATTR_FLAGS = "flags" | ||||
| @@ -173,7 +186,9 @@ ATTR_HASSIO_API = "hassio_api" | ||||
| ATTR_HASSIO_ROLE = "hassio_role" | ||||
| ATTR_HASSOS = "hassos" | ||||
| ATTR_HEALTHY = "healthy" | ||||
| ATTR_HEARTBEAT_LED = "heartbeat_led" | ||||
| ATTR_HOMEASSISTANT = "homeassistant" | ||||
| ATTR_HOMEASSISTANT_EXCLUDE_DATABASE = "homeassistant_exclude_database" | ||||
| ATTR_HOMEASSISTANT_API = "homeassistant_api" | ||||
| ATTR_HOST = "host" | ||||
| ATTR_HOST_DBUS = "host_dbus" | ||||
| @@ -248,6 +263,7 @@ ATTR_PLUGINS = "plugins" | ||||
| ATTR_PORT = "port" | ||||
| ATTR_PORTS = "ports" | ||||
| ATTR_PORTS_DESCRIPTION = "ports_description" | ||||
| ATTR_POWER_LED = "power_led" | ||||
| ATTR_PREFIX = "prefix" | ||||
| ATTR_PRIMARY = "primary" | ||||
| ATTR_PRIORITY = "priority" | ||||
| @@ -271,6 +287,9 @@ ATTR_SERVERS = "servers" | ||||
| ATTR_SERVICE = "service" | ||||
| ATTR_SERVICES = "services" | ||||
| ATTR_SESSION = "session" | ||||
| ATTR_SESSION_DATA = "session_data" | ||||
| ATTR_SESSION_DATA_USER = "user" | ||||
| ATTR_SESSION_DATA_USER_ID = "user_id" | ||||
| ATTR_SIGNAL = "signal" | ||||
| ATTR_SIZE = "size" | ||||
| ATTR_SLUG = "slug" | ||||
| @@ -291,6 +310,8 @@ ATTR_SUPERVISOR_VERSION = "supervisor_version" | ||||
| ATTR_SUPPORTED = "supported" | ||||
| ATTR_SUPPORTED_ARCH = "supported_arch" | ||||
| ATTR_SYSTEM = "system" | ||||
| ATTR_SYSTEM_MANAGED = "system_managed" | ||||
| ATTR_SYSTEM_MANAGED_CONFIG_ENTRY = "system_managed_config_entry" | ||||
| ATTR_TIMEOUT = "timeout" | ||||
| ATTR_TIMEZONE = "timezone" | ||||
| ATTR_TITLE = "title" | ||||
| @@ -308,11 +329,13 @@ ATTR_UPDATE_KEY = "update_key" | ||||
| ATTR_URL = "url" | ||||
| ATTR_USB = "usb" | ||||
| ATTR_USER = "user" | ||||
| ATTR_USER_LED = "user_led" | ||||
| ATTR_USERNAME = "username" | ||||
| ATTR_UUID = "uuid" | ||||
| ATTR_VALID = "valid" | ||||
| ATTR_VALUE = "value" | ||||
| ATTR_VERSION = "version" | ||||
| ATTR_VERSION_TIMESTAMP = "version_timestamp" | ||||
| ATTR_VERSION_LATEST = "version_latest" | ||||
| ATTR_VIDEO = "video" | ||||
| ATTR_VLAN = "vlan" | ||||
| @@ -327,14 +350,6 @@ PROVIDE_SERVICE = "provide" | ||||
| NEED_SERVICE = "need" | ||||
| WANT_SERVICE = "want" | ||||
|  | ||||
|  | ||||
| MAP_CONFIG = "config" | ||||
| MAP_SSL = "ssl" | ||||
| MAP_ADDONS = "addons" | ||||
| MAP_BACKUP = "backup" | ||||
| MAP_SHARE = "share" | ||||
| MAP_MEDIA = "media" | ||||
|  | ||||
| ARCH_ARMHF = "armhf" | ||||
| ARCH_ARMV7 = "armv7" | ||||
| ARCH_AARCH64 = "aarch64" | ||||
| @@ -367,14 +382,29 @@ ROLE_ADMIN = "admin" | ||||
| ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN] | ||||
|  | ||||
|  | ||||
| class AddonBoot(str, Enum): | ||||
| class AddonBootConfig(StrEnum): | ||||
|     """Boot mode config for the add-on.""" | ||||
|  | ||||
|     AUTO = "auto" | ||||
|     MANUAL = "manual" | ||||
|     MANUAL_ONLY = "manual_only" | ||||
|  | ||||
|  | ||||
| class AddonBoot(StrEnum): | ||||
|     """Boot mode for the add-on.""" | ||||
|  | ||||
|     AUTO = "auto" | ||||
|     MANUAL = "manual" | ||||
|  | ||||
|     @classmethod | ||||
|     def _missing_(cls, value: str) -> Self | None: | ||||
|         """Convert 'forced' config values to their counterpart.""" | ||||
|         if value == AddonBootConfig.MANUAL_ONLY: | ||||
|             return AddonBoot.MANUAL | ||||
|         return None | ||||
|  | ||||
| class AddonStartup(str, Enum): | ||||
|  | ||||
| class AddonStartup(StrEnum): | ||||
|     """Startup types of Add-on.""" | ||||
|  | ||||
|     INITIALIZE = "initialize" | ||||
| @@ -384,7 +414,7 @@ class AddonStartup(str, Enum): | ||||
|     ONCE = "once" | ||||
|  | ||||
|  | ||||
| class AddonStage(str, Enum): | ||||
| class AddonStage(StrEnum): | ||||
|     """Stage types of add-on.""" | ||||
|  | ||||
|     STABLE = "stable" | ||||
| @@ -392,7 +422,7 @@ class AddonStage(str, Enum): | ||||
|     DEPRECATED = "deprecated" | ||||
|  | ||||
|  | ||||
| class AddonState(str, Enum): | ||||
| class AddonState(StrEnum): | ||||
|     """State of add-on.""" | ||||
|  | ||||
|     STARTUP = "startup" | ||||
| @@ -402,7 +432,7 @@ class AddonState(str, Enum): | ||||
|     ERROR = "error" | ||||
|  | ||||
|  | ||||
| class UpdateChannel(str, Enum): | ||||
| class UpdateChannel(StrEnum): | ||||
|     """Core supported update channels.""" | ||||
|  | ||||
|     STABLE = "stable" | ||||
| @@ -410,7 +440,7 @@ class UpdateChannel(str, Enum): | ||||
|     DEV = "dev" | ||||
|  | ||||
|  | ||||
| class CoreState(str, Enum): | ||||
| class CoreState(StrEnum): | ||||
|     """Represent current loading state.""" | ||||
|  | ||||
|     INITIALIZE = "initialize" | ||||
| @@ -423,7 +453,7 @@ class CoreState(str, Enum): | ||||
|     CLOSE = "close" | ||||
|  | ||||
|  | ||||
| class LogLevel(str, Enum): | ||||
| class LogLevel(StrEnum): | ||||
|     """Logging level of system.""" | ||||
|  | ||||
|     DEBUG = "debug" | ||||
| @@ -433,7 +463,7 @@ class LogLevel(str, Enum): | ||||
|     CRITICAL = "critical" | ||||
|  | ||||
|  | ||||
| class HostFeature(str, Enum): | ||||
| class HostFeature(StrEnum): | ||||
|     """Host feature.""" | ||||
|  | ||||
|     HASSOS = "hassos" | ||||
| @@ -445,16 +475,18 @@ class HostFeature(str, Enum): | ||||
|     TIMEDATE = "timedate" | ||||
|  | ||||
|  | ||||
| class BusEvent(str, Enum): | ||||
| class BusEvent(StrEnum): | ||||
|     """Bus event type.""" | ||||
|  | ||||
|     DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change" | ||||
|     HARDWARE_NEW_DEVICE = "hardware_new_device" | ||||
|     HARDWARE_REMOVE_DEVICE = "hardware_remove_device" | ||||
|     DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change" | ||||
|     SUPERVISOR_JOB_END = "supervisor_job_end" | ||||
|     SUPERVISOR_JOB_START = "supervisor_job_start" | ||||
|     SUPERVISOR_STATE_CHANGE = "supervisor_state_change" | ||||
|  | ||||
|  | ||||
| class CpuArch(str, Enum): | ||||
| class CpuArch(StrEnum): | ||||
|     """Supported CPU architectures.""" | ||||
|  | ||||
|     ARMV7 = "armv7" | ||||
| @@ -464,6 +496,48 @@ class CpuArch(str, Enum): | ||||
|     AMD64 = "amd64" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class IngressSessionDataUser: | ||||
|     """Format of an IngressSessionDataUser object.""" | ||||
|  | ||||
|     id: str | ||||
|     display_name: str | None = None | ||||
|     username: str | None = None | ||||
|  | ||||
|     def to_dict(self) -> dict[str, str | None]: | ||||
|         """Get dictionary representation.""" | ||||
|         return { | ||||
|             ATTR_ID: self.id, | ||||
|             ATTR_DISPLAYNAME: self.display_name, | ||||
|             ATTR_USERNAME: self.username, | ||||
|         } | ||||
|  | ||||
|     @classmethod | ||||
|     def from_dict(cls, data: dict[str, str | None]) -> Self: | ||||
|         """Return object from dictionary representation.""" | ||||
|         return cls( | ||||
|             id=data[ATTR_ID], | ||||
|             display_name=data.get(ATTR_DISPLAYNAME), | ||||
|             username=data.get(ATTR_USERNAME), | ||||
|         ) | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class IngressSessionData: | ||||
|     """Format of an IngressSessionData object.""" | ||||
|  | ||||
|     user: IngressSessionDataUser | ||||
|  | ||||
|     def to_dict(self) -> dict[str, dict[str, str | None]]: | ||||
|         """Get dictionary representation.""" | ||||
|         return {ATTR_USER: self.user.to_dict()} | ||||
|  | ||||
|     @classmethod | ||||
|     def from_dict(cls, data: dict[str, dict[str, str | None]]) -> Self: | ||||
|         """Return object from dictionary representation.""" | ||||
|         return cls(user=IngressSessionDataUser.from_dict(data[ATTR_USER])) | ||||
|  | ||||
|  | ||||
| STARTING_STATES = [ | ||||
|     CoreState.INITIALIZE, | ||||
|     CoreState.STARTUP, | ||||
|   | ||||
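The enum migration above also adds a `_missing_` hook so the `manual_only` value accepted in add-on configs collapses to `MANUAL` in the user-facing `AddonBoot` enum. A quick demonstration of how that hook behaves (requires Python 3.11+ for `StrEnum`):

    from enum import StrEnum

    class AddonBootConfig(StrEnum):
        AUTO = "auto"
        MANUAL = "manual"
        MANUAL_ONLY = "manual_only"

    class AddonBoot(StrEnum):
        AUTO = "auto"
        MANUAL = "manual"

        @classmethod
        def _missing_(cls, value):
            # Enum calls this when lookup by value fails; StrEnum members
            # compare equal to their string value, so this match works.
            if value == AddonBootConfig.MANUAL_ONLY:
                return AddonBoot.MANUAL
            return None

    print(AddonBoot("manual_only"))             # AddonBoot.MANUAL
    print(AddonBoot("auto") is AddonBoot.AUTO)  # True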
| @@ -1,12 +1,11 @@ | ||||
| """Main file for Supervisor.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| from contextlib import suppress | ||||
| from datetime import timedelta | ||||
| import logging | ||||
|  | ||||
| import async_timeout | ||||
|  | ||||
| from .const import ( | ||||
|     ATTR_STARTUP, | ||||
|     RUN_SUPERVISOR_STATE, | ||||
| @@ -28,7 +27,7 @@ from .homeassistant.core import LANDINGPAGE | ||||
| from .resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason | ||||
| from .utils.dt import utcnow | ||||
| from .utils.sentry import capture_exception | ||||
| from .utils.whoami import retrieve_whoami | ||||
| from .utils.whoami import WhoamiData, retrieve_whoami | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -63,20 +62,23 @@ class Core(CoreSysAttributes): | ||||
|         if self._state == new_state: | ||||
|             return | ||||
|         try: | ||||
|             RUN_SUPERVISOR_STATE.write_text(new_state.value, encoding="utf-8") | ||||
|             RUN_SUPERVISOR_STATE.write_text(new_state, encoding="utf-8") | ||||
|         except OSError as err: | ||||
|             _LOGGER.warning( | ||||
|                 "Can't update the Supervisor state to %s: %s", new_state, err | ||||
|             ) | ||||
|         finally: | ||||
|             self._state = new_state | ||||
|             self.sys_bus.fire_event(BusEvent.SUPERVISOR_STATE_CHANGE, new_state) | ||||
|  | ||||
|             # These will be received by HA after startup has completed which won't make sense | ||||
|             if new_state not in STARTING_STATES: | ||||
|                 self.sys_homeassistant.websocket.supervisor_update_event( | ||||
|                     "info", {"state": new_state} | ||||
|                 ) | ||||
|             # Don't attempt to notify anyone on CLOSE as we're about to stop the event loop | ||||
|             if new_state != CoreState.CLOSE: | ||||
|                 self.sys_bus.fire_event(BusEvent.SUPERVISOR_STATE_CHANGE, new_state) | ||||
|  | ||||
|                 # These will be received by HA after startup has completed which won't make sense | ||||
|                 if new_state not in STARTING_STATES: | ||||
|                     self.sys_homeassistant.websocket.supervisor_update_event( | ||||
|                         "info", {"state": new_state} | ||||
|                     ) | ||||
|  | ||||
|     async def connect(self): | ||||
|         """Connect Supervisor container.""" | ||||
| @@ -132,10 +134,10 @@ class Core(CoreSysAttributes): | ||||
|             self.sys_mounts.load(), | ||||
|             # Load Docker manager | ||||
|             self.sys_docker.load(), | ||||
|             # Load Plugins container | ||||
|             self.sys_plugins.load(), | ||||
|             # load last available data | ||||
|             self.sys_updater.load(), | ||||
|             # Load Plugins container | ||||
|             self.sys_plugins.load(), | ||||
|             # Load Home Assistant | ||||
|             self.sys_homeassistant.load(), | ||||
|             # Load CPU/Arch | ||||
| @@ -176,7 +178,15 @@ class Core(CoreSysAttributes): | ||||
|             and not self.sys_dev | ||||
|             and self.supported | ||||
|         ): | ||||
|             self.sys_dbus.agent.diagnostics = self.sys_config.diagnostics | ||||
|             try: | ||||
|                 await self.sys_dbus.agent.set_diagnostics(self.sys_config.diagnostics) | ||||
|             except Exception as err:  # pylint: disable=broad-except | ||||
|                 _LOGGER.warning( | ||||
|                     "Could not set diagnostics to %s due to %s", | ||||
|                     self.sys_config.diagnostics, | ||||
|                     err, | ||||
|                 ) | ||||
|                 capture_exception(err) | ||||
|  | ||||
|         # Evaluate the system | ||||
|         await self.sys_resolution.evaluate.evaluate_system() | ||||
| @@ -247,7 +257,7 @@ class Core(CoreSysAttributes): | ||||
|                 except HomeAssistantError as err: | ||||
|                     capture_exception(err) | ||||
|             else: | ||||
|                 _LOGGER.info("Skiping start of Home Assistant") | ||||
|                 _LOGGER.info("Skipping start of Home Assistant") | ||||
|  | ||||
|             # Core is not running | ||||
|             if self.sys_homeassistant.core.error_state: | ||||
| @@ -295,7 +305,7 @@ class Core(CoreSysAttributes): | ||||
|  | ||||
|         # Stage 1 | ||||
|         try: | ||||
|             async with async_timeout.timeout(10): | ||||
|             async with asyncio.timeout(10): | ||||
|                 await asyncio.wait( | ||||
|                     [ | ||||
|                         self.sys_create_task(coro) | ||||
| @@ -306,12 +316,12 @@ class Core(CoreSysAttributes): | ||||
|                         ) | ||||
|                     ] | ||||
|                 ) | ||||
|         except asyncio.TimeoutError: | ||||
|         except TimeoutError: | ||||
|             _LOGGER.warning("Stage 1: Force Shutdown!") | ||||
|  | ||||
|         # Stage 2 | ||||
|         try: | ||||
|             async with async_timeout.timeout(10): | ||||
|             async with asyncio.timeout(10): | ||||
|                 await asyncio.wait( | ||||
|                     [ | ||||
|                         self.sys_create_task(coro) | ||||
| @@ -323,7 +333,7 @@ class Core(CoreSysAttributes): | ||||
|                         ) | ||||
|                     ] | ||||
|                 ) | ||||
|         except asyncio.TimeoutError: | ||||
|         except TimeoutError: | ||||
|             _LOGGER.warning("Stage 2: Force Shutdown!") | ||||
|  | ||||
|         self.state = CoreState.CLOSE | ||||
| @@ -336,9 +346,6 @@ class Core(CoreSysAttributes): | ||||
|         if self.state == CoreState.RUNNING: | ||||
|             self.state = CoreState.SHUTDOWN | ||||
|  | ||||
|         # Stop docker monitoring | ||||
|         await self.sys_docker.unload() | ||||
|  | ||||
|         # Shutdown Application Add-ons, using Home Assistant API | ||||
|         await self.sys_addons.shutdown(AddonStartup.APPLICATION) | ||||
|  | ||||
| @@ -360,6 +367,13 @@ class Core(CoreSysAttributes): | ||||
|         self.sys_config.last_boot = self.sys_hardware.helper.last_boot | ||||
|         self.sys_config.save_data() | ||||
|  | ||||
|     async def _retrieve_whoami(self, with_ssl: bool) -> WhoamiData | None: | ||||
|         try: | ||||
|             return await retrieve_whoami(self.sys_websession, with_ssl) | ||||
|         except WhoamiSSLError: | ||||
|             _LOGGER.info("Whoami service SSL error") | ||||
|             return None | ||||
|  | ||||
|     async def _adjust_system_datetime(self): | ||||
|         """Adjust system time/date on startup.""" | ||||
|         # If no timezone is detected or set | ||||
| @@ -372,21 +386,15 @@ class Core(CoreSysAttributes): | ||||
|  | ||||
|         # Get Timezone data | ||||
|         try: | ||||
|             data = await retrieve_whoami(self.sys_websession) | ||||
|         except WhoamiSSLError: | ||||
|             pass | ||||
|             data = await self._retrieve_whoami(True) | ||||
|  | ||||
|             # SSL Date Issue & possible time drift | ||||
|             if not data: | ||||
|                 data = await self._retrieve_whoami(False) | ||||
|         except WhoamiError as err: | ||||
|             _LOGGER.warning("Can't adjust Time/Date settings: %s", err) | ||||
|             return | ||||
|  | ||||
|         # SSL Date Issue & possible time drift | ||||
|         if not data: | ||||
|             try: | ||||
|                 data = await retrieve_whoami(self.sys_websession, with_ssl=False) | ||||
|             except WhoamiError as err: | ||||
|                 _LOGGER.error("Can't adjust Time/Date settings: %s", err) | ||||
|                 return | ||||
|  | ||||
|         self.sys_config.timezone = self.sys_config.timezone or data.timezone | ||||
|  | ||||
|         # Calculate if system time is out of sync | ||||
|   | ||||
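The refactor above folds both whoami attempts into `_retrieve_whoami`, which swallows only the SSL error so the caller can retry once without SSL (a failing TLS handshake is itself a hint of clock drift). A generic sketch of that try-with-SSL-then-fall-back shape, with a hypothetical fetch function standing in for `retrieve_whoami`:

    import asyncio

    class FetchSSLError(Exception):
        """Stand-in for WhoamiSSLError."""

    async def fetch_time(with_ssl: bool) -> str:
        # Hypothetical: a skewed clock makes certificate validation fail.
        if with_ssl:
            raise FetchSSLError("certificate not yet valid")
        return "2023-08-15T12:00:00+00:00"

    async def retrieve(with_ssl: bool) -> str | None:
        try:
            return await fetch_time(with_ssl)
        except FetchSSLError:
            return None  # signal the caller to retry without SSL

    async def main() -> None:
        data = await retrieve(True)
        if not data:  # SSL date issue & possible time drift
            data = await retrieve(False)
        print(data)

    asyncio.run(main())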
| @@ -1,8 +1,10 @@ | ||||
| """Handle core shared data.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Callable, Coroutine | ||||
| from contextvars import Context, copy_context | ||||
| from datetime import datetime | ||||
| from functools import partial | ||||
| import logging | ||||
| @@ -17,7 +19,7 @@ from .const import ENV_SUPERVISOR_DEV, SERVER_SOFTWARE | ||||
| from .utils.dt import UTC, get_time_zone | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .addons import AddonManager | ||||
|     from .addons.manager import AddonManager | ||||
|     from .api import RestAPI | ||||
|     from .arch import CpuArch | ||||
|     from .auth import Auth | ||||
| @@ -61,7 +63,7 @@ class CoreSys: | ||||
|  | ||||
|         # External objects | ||||
|         self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop() | ||||
|         self._websession: aiohttp.ClientSession = aiohttp.ClientSession() | ||||
|         self._websession = None | ||||
|  | ||||
|         # Global objects | ||||
|         self._config: CoreConfig = CoreConfig() | ||||
| @@ -94,10 +96,11 @@ class CoreSys: | ||||
|         self._bus: Bus | None = None | ||||
|         self._mounts: MountManager | None = None | ||||
|  | ||||
|         # Set default header for aiohttp | ||||
|         self._websession._default_headers = MappingProxyType( | ||||
|             {aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE} | ||||
|         ) | ||||
|         # Setup aiohttp session | ||||
|         self.create_websession() | ||||
|  | ||||
|         # Task factory attributes | ||||
|         self._set_task_context: list[Callable[[Context], Context]] = [] | ||||
|  | ||||
|     @property | ||||
|     def dev(self) -> bool: | ||||
| @@ -109,8 +112,11 @@ class CoreSys: | ||||
|         """Return system timezone.""" | ||||
|         if self.config.timezone: | ||||
|             return self.config.timezone | ||||
|         # pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811) | ||||
|         # pylint: disable=no-member | ||||
|         if self.host.info.timezone: | ||||
|             return self.host.info.timezone | ||||
|         # pylint: enable=no-member | ||||
|         return "UTC" | ||||
|  | ||||
|     @property | ||||
| @@ -520,6 +526,17 @@ class CoreSys: | ||||
|         """Return now in local timezone.""" | ||||
|         return datetime.now(get_time_zone(self.timezone) or UTC) | ||||
|  | ||||
|     def add_set_task_context_callback( | ||||
|         self, callback: Callable[[Context], Context] | ||||
|     ) -> None: | ||||
|         """Add callback used to modify context prior to creating a task. | ||||
|  | ||||
|         Only used for tasks created via CoreSys.create_task. Callback can modify the provided | ||||
|         context using context.run (ex. `context.run(var.set, "new_value")`). Callback should | ||||
|         return the context to be provided to the task. | ||||
|         """ | ||||
|         self._set_task_context.append(callback) | ||||
|  | ||||
|     def run_in_executor( | ||||
|         self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any] | ||||
|     ) -> Coroutine[Any, Any, T]: | ||||
| @@ -529,9 +546,54 @@ class CoreSys: | ||||
|  | ||||
|         return self.loop.run_in_executor(None, funct, *args) | ||||
|  | ||||
|     def create_websession(self) -> None: | ||||
|         """Create a new aiohttp session.""" | ||||
|         if self._websession: | ||||
|             self.create_task(self._websession.close()) | ||||
|  | ||||
|         # Create session and set default header for aiohttp | ||||
|         self._websession: aiohttp.ClientSession = aiohttp.ClientSession( | ||||
|             headers=MappingProxyType({aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE}) | ||||
|         ) | ||||
|  | ||||
|     def _create_context(self) -> Context: | ||||
|         """Create a new context for a task.""" | ||||
|         context = copy_context() | ||||
|         for callback in self._set_task_context: | ||||
|             context = callback(context) | ||||
|         return context | ||||
|  | ||||
|     def create_task(self, coroutine: Coroutine) -> asyncio.Task: | ||||
|         """Create an async task.""" | ||||
|         return self.loop.create_task(coroutine) | ||||
|         return self.loop.create_task(coroutine, context=self._create_context()) | ||||
|  | ||||
|     def call_later( | ||||
|         self, | ||||
|         delay: float, | ||||
|         funct: Callable[..., Coroutine[Any, Any, T]], | ||||
|         *args: tuple[Any], | ||||
|         **kwargs: dict[str, Any], | ||||
|     ) -> asyncio.TimerHandle: | ||||
|         """Start a task after a delay.""" | ||||
|         if kwargs: | ||||
|             funct = partial(funct, **kwargs) | ||||
|  | ||||
|         return self.loop.call_later(delay, funct, *args, context=self._create_context()) | ||||
|  | ||||
|     def call_at( | ||||
|         self, | ||||
|         when: datetime, | ||||
|         funct: Callable[..., Coroutine[Any, Any, T]], | ||||
|         *args: tuple[Any], | ||||
|         **kwargs: dict[str, Any], | ||||
|     ) -> asyncio.TimerHandle: | ||||
|         """Start a task at the specified datetime.""" | ||||
|         if kwargs: | ||||
|             funct = partial(funct, **kwargs) | ||||
|  | ||||
|         return self.loop.call_at( | ||||
|             when.timestamp(), funct, *args, context=self._create_context() | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class CoreSysAttributes: | ||||
| @@ -706,9 +768,29 @@ class CoreSysAttributes: | ||||
|     def sys_run_in_executor( | ||||
|         self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any] | ||||
|     ) -> Coroutine[Any, Any, T]: | ||||
|         """Add an job to the executor pool.""" | ||||
|         """Add a job to the executor pool.""" | ||||
|         return self.coresys.run_in_executor(funct, *args, **kwargs) | ||||
|  | ||||
|     def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task: | ||||
|         """Create an async task.""" | ||||
|         return self.coresys.create_task(coroutine) | ||||
|  | ||||
|     def sys_call_later( | ||||
|         self, | ||||
|         delay: float, | ||||
|         funct: Callable[..., Coroutine[Any, Any, T]], | ||||
|         *args: tuple[Any], | ||||
|         **kwargs: dict[str, Any], | ||||
|     ) -> asyncio.TimerHandle: | ||||
|         """Start a task after a delay.""" | ||||
|         return self.coresys.call_later(delay, funct, *args, **kwargs) | ||||
|  | ||||
|     def sys_call_at( | ||||
|         self, | ||||
|         when: datetime, | ||||
|         funct: Callable[..., Coroutine[Any, Any, T]], | ||||
|         *args: tuple[Any], | ||||
|         **kwargs: dict[str, Any], | ||||
|     ) -> asyncio.TimerHandle: | ||||
|         """Start a task at the specified datetime.""" | ||||
|         return self.coresys.call_at(when, funct, *args, **kwargs) | ||||
|   | ||||
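The new task-factory plumbing lets components register callbacks that stamp contextvars onto every task created through `CoreSys.create_task`. A self-contained sketch of the same mechanism (Python 3.11+ for the `context=` argument to `create_task`; the names are illustrative):

    import asyncio
    from contextvars import Context, ContextVar, copy_context

    request_id: ContextVar[str] = ContextVar("request_id", default="-")

    def stamp_request(context: Context) -> Context:
        # Modify the copied context as the docstring suggests: context.run(var.set, ...)
        context.run(request_id.set, "req-42")
        return context

    def create_task(coro, callbacks=(stamp_request,)):
        context = copy_context()
        for callback in callbacks:
            context = callback(context)
        return asyncio.get_running_loop().create_task(coro, context=context)

    async def worker() -> None:
        print("task sees:", request_id.get())    # task sees: req-42

    async def main() -> None:
        await create_task(worker())
        print("caller sees:", request_id.get())  # caller sees: -

    asyncio.run(main())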
| @@ -5,7 +5,9 @@ | ||||
|   "raspberrypi3-64": ["aarch64", "armv7", "armhf"], | ||||
|   "raspberrypi4": ["armv7", "armhf"], | ||||
|   "raspberrypi4-64": ["aarch64", "armv7", "armhf"], | ||||
|   "raspberrypi5-64": ["aarch64", "armv7", "armhf"], | ||||
|   "yellow": ["aarch64", "armv7", "armhf"], | ||||
|   "green": ["aarch64", "armv7", "armhf"], | ||||
|   "tinker": ["armv7", "armhf"], | ||||
|   "odroid-c2": ["aarch64", "armv7", "armhf"], | ||||
|   "odroid-c4": ["aarch64", "armv7", "armhf"], | ||||
|   | ||||
| @@ -1,12 +1,14 @@ | ||||
| """OS-Agent implementation for DBUS.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from awesomeversion import AwesomeVersion | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ...exceptions import DBusError, DBusInterfaceError | ||||
| from ...exceptions import DBusInterfaceError, DBusServiceUnkownError | ||||
| from ..const import ( | ||||
|     DBUS_ATTR_DIAGNOSTICS, | ||||
|     DBUS_ATTR_VERSION, | ||||
| @@ -80,11 +82,9 @@ class OSAgent(DBusInterfaceProxy): | ||||
|         """Return if diagnostics is enabled on OS-Agent.""" | ||||
|         return self.properties[DBUS_ATTR_DIAGNOSTICS] | ||||
|  | ||||
|     @diagnostics.setter | ||||
|     @dbus_property | ||||
|     def diagnostics(self, value: bool) -> None: | ||||
|     def set_diagnostics(self, value: bool) -> Awaitable[None]: | ||||
|         """Enable or disable OS-Agent diagnostics.""" | ||||
|         asyncio.create_task(self.dbus.set_diagnostics(value)) | ||||
|         return self.dbus.set_diagnostics(value) | ||||
|  | ||||
|     @property | ||||
|     def all(self) -> list[DBusInterface]: | ||||
| @@ -96,13 +96,25 @@ class OSAgent(DBusInterfaceProxy): | ||||
|         _LOGGER.info("Load dbus interface %s", self.name) | ||||
|         try: | ||||
|             await super().connect(bus) | ||||
|             await asyncio.gather(*[dbus.connect(bus) for dbus in self.all]) | ||||
|         except DBusError: | ||||
|             _LOGGER.warning("Can't connect to OS-Agent") | ||||
|         except DBusInterfaceError: | ||||
|             _LOGGER.warning( | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError): | ||||
|             _LOGGER.error( | ||||
|                 "No OS-Agent support on the host. Some Host functions have been disabled." | ||||
|             ) | ||||
|             return | ||||
|  | ||||
|         errors = await asyncio.gather( | ||||
|             *[dbus.connect(bus) for dbus in self.all], return_exceptions=True | ||||
|         ) | ||||
|  | ||||
|         for err in errors: | ||||
|             if err: | ||||
|                 dbus = self.all[errors.index(err)] | ||||
|                 _LOGGER.error( | ||||
|                     "Can't load OS Agent dbus interface %s %s: %s", | ||||
|                     dbus.bus_name, | ||||
|                     dbus.object_path, | ||||
|                     err, | ||||
|                 ) | ||||
|  | ||||
|     @dbus_connected | ||||
|     async def update(self, changed: dict[str, Any] | None = None) -> None: | ||||
|   | ||||
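`OSAgent.connect` now gathers the sub-interface connections with `return_exceptions=True` and logs each failure against its interface. A generic sketch of pairing gather results back to their sources; `zip` is used here for the pairing, which sidesteps the first-match behaviour of `list.index` (the interface names are made up):

    import asyncio

    async def connect(name: str) -> None:
        if name == "cgroup":  # pretend this interface is missing on the host
            raise ConnectionError("interface not provided by host")

    async def main() -> None:
        names = ["apparmor", "cgroup", "datadisk"]
        results = await asyncio.gather(
            *[connect(n) for n in names], return_exceptions=True
        )
        # zip keeps each result aligned with the awaitable that produced it.
        for name, err in zip(names, results):
            if isinstance(err, Exception):
                print(f"Can't load dbus interface {name}: {err}")

    asyncio.run(main())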
| @@ -1,4 +1,5 @@ | ||||
| """AppArmor object for OS-Agent.""" | ||||
|  | ||||
| from pathlib import Path | ||||
|  | ||||
| from awesomeversion import AwesomeVersion | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| """Board management for OS Agent.""" | ||||
|  | ||||
| import logging | ||||
|  | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ....exceptions import BoardInvalidError | ||||
| from ....exceptions import BoardInvalidError, DBusInterfaceError, DBusServiceUnkownError | ||||
| from ...const import ( | ||||
|     DBUS_ATTR_BOARD, | ||||
|     DBUS_IFACE_HAOS_BOARDS, | ||||
| @@ -11,7 +12,8 @@ from ...const import ( | ||||
|     DBUS_OBJECT_HAOS_BOARDS, | ||||
| ) | ||||
| from ...interface import DBusInterfaceProxy, dbus_property | ||||
| from .const import BOARD_NAME_SUPERVISED, BOARD_NAME_YELLOW | ||||
| from .const import BOARD_NAME_GREEN, BOARD_NAME_SUPERVISED, BOARD_NAME_YELLOW | ||||
| from .green import Green | ||||
| from .interface import BoardProxy | ||||
| from .supervised import Supervised | ||||
| from .yellow import Yellow | ||||
| @@ -39,6 +41,14 @@ class BoardManager(DBusInterfaceProxy): | ||||
|         """Get board name.""" | ||||
|         return self.properties[DBUS_ATTR_BOARD] | ||||
|  | ||||
|     @property | ||||
|     def green(self) -> Green: | ||||
|         """Get Green board.""" | ||||
|         if self.board != BOARD_NAME_GREEN: | ||||
|             raise BoardInvalidError("Green board is not in use", _LOGGER.error) | ||||
|  | ||||
|         return self._board_proxy | ||||
|  | ||||
|     @property | ||||
|     def supervised(self) -> Supervised: | ||||
|         """Get Supervised board.""" | ||||
| @@ -61,8 +71,14 @@ class BoardManager(DBusInterfaceProxy): | ||||
|  | ||||
|         if self.board == BOARD_NAME_YELLOW: | ||||
|             self._board_proxy = Yellow() | ||||
|         elif self.board == BOARD_NAME_GREEN: | ||||
|             self._board_proxy = Green() | ||||
|         elif self.board == BOARD_NAME_SUPERVISED: | ||||
|             self._board_proxy = Supervised() | ||||
|         else: | ||||
|             return | ||||
|  | ||||
|         if self._board_proxy: | ||||
|         try: | ||||
|             await self._board_proxy.connect(bus) | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError) as ex: | ||||
|             _LOGGER.warning("OS-Agent board support initialization failed: %s", ex) | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """Constants for boards.""" | ||||
|  | ||||
| BOARD_NAME_GREEN = "Green" | ||||
| BOARD_NAME_SUPERVISED = "Supervised" | ||||
| BOARD_NAME_YELLOW = "Yellow" | ||||
|   | ||||
| supervisor/dbus/agent/boards/green.py (new file, 65 lines) | ||||
							| @@ -0,0 +1,65 @@ | ||||
| """Green board management.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
|  | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ....const import ATTR_ACTIVITY_LED, ATTR_POWER_LED, ATTR_USER_LED | ||||
| from ...const import DBUS_ATTR_ACTIVITY_LED, DBUS_ATTR_POWER_LED, DBUS_ATTR_USER_LED | ||||
| from ...interface import dbus_property | ||||
| from .const import BOARD_NAME_GREEN | ||||
| from .interface import BoardProxy | ||||
| from .validate import SCHEMA_GREEN_BOARD | ||||
|  | ||||
|  | ||||
| class Green(BoardProxy): | ||||
|     """Green board manager object.""" | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize properties.""" | ||||
|         super().__init__(BOARD_NAME_GREEN, SCHEMA_GREEN_BOARD) | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
|     def activity_led(self) -> bool: | ||||
|         """Get activity LED enabled.""" | ||||
|         return self.properties[DBUS_ATTR_ACTIVITY_LED] | ||||
|  | ||||
|     def set_activity_led(self, enabled: bool) -> Awaitable[None]: | ||||
|         """Enable/disable activity LED.""" | ||||
|         self._data[ATTR_ACTIVITY_LED] = enabled | ||||
|         return self.dbus.Boards.Green.set_activity_led(enabled) | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
|     def power_led(self) -> bool: | ||||
|         """Get power LED enabled.""" | ||||
|         return self.properties[DBUS_ATTR_POWER_LED] | ||||
|  | ||||
|     def set_power_led(self, enabled: bool) -> Awaitable[None]: | ||||
|         """Enable/disable power LED.""" | ||||
|         self._data[ATTR_POWER_LED] = enabled | ||||
|         return self.dbus.Boards.Green.set_power_led(enabled) | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
|     def user_led(self) -> bool: | ||||
|         """Get user LED enabled.""" | ||||
|         return self.properties[DBUS_ATTR_USER_LED] | ||||
|  | ||||
|     def set_user_led(self, enabled: bool) -> Awaitable[None]: | ||||
|         """Enable/disable disk LED.""" | ||||
|         self._data[ATTR_USER_LED] = enabled | ||||
|         return self.dbus.Boards.Green.set_user_led(enabled) | ||||
|  | ||||
|     async def connect(self, bus: MessageBus) -> None: | ||||
|         """Connect to D-Bus.""" | ||||
|         await super().connect(bus) | ||||
|  | ||||
|         # Set LEDs based on settings on connect | ||||
|         await asyncio.gather( | ||||
|             self.set_activity_led(self._data[ATTR_ACTIVITY_LED]), | ||||
|             self.set_power_led(self._data[ATTR_POWER_LED]), | ||||
|             self.set_user_led(self._data[ATTR_USER_LED]), | ||||
|         ) | ||||
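Throughout the board proxies this compare replaces fire-and-forget property setters (`asyncio.create_task(...)` inside a `@x.setter`) with `set_*` methods that return the D-Bus call's awaitable, so callers can await completion and see errors. The shape of that change in isolation, with a hypothetical `dbus_call` standing in for dbus_fast:

    import asyncio
    from collections.abc import Awaitable

    async def dbus_call(prop: str, value: bool) -> None:
        """Hypothetical stand-in for a dbus_fast set_* method call."""
        await asyncio.sleep(0)
        print(f"{prop} -> {value}")

    class Board:
        def set_power_led(self, enabled: bool) -> Awaitable[None]:
            # Return the coroutine rather than spawning a task: the caller
            # controls when it runs and any D-Bus error propagates to them.
            return dbus_call("power_led", enabled)

    async def main() -> None:
        board = Board()
        # Multiple LED writes can be awaited together, as Green.connect() does.
        await asyncio.gather(board.set_power_led(True), board.set_power_led(False))

    asyncio.run(main())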
| @@ -1,17 +1,23 @@ | ||||
| """Board dbus proxy interface.""" | ||||
|  | ||||
| from voluptuous import Schema | ||||
|  | ||||
| from ....const import FILE_HASSIO_BOARD | ||||
| from ....utils.common import FileConfiguration | ||||
| from ...const import DBUS_IFACE_HAOS_BOARDS, DBUS_NAME_HAOS, DBUS_OBJECT_HAOS_BOARDS | ||||
| from ...interface import DBusInterfaceProxy | ||||
| from .validate import SCHEMA_BASE_BOARD | ||||
|  | ||||
|  | ||||
| class BoardProxy(DBusInterfaceProxy): | ||||
| class BoardProxy(FileConfiguration, DBusInterfaceProxy): | ||||
|     """DBus interface proxy for os board.""" | ||||
|  | ||||
|     bus_name: str = DBUS_NAME_HAOS | ||||
|  | ||||
|     def __init__(self, name: str) -> None: | ||||
|     def __init__(self, name: str, file_schema: Schema | None = None) -> None: | ||||
|         """Initialize properties.""" | ||||
|         super().__init__() | ||||
|         super().__init__(FILE_HASSIO_BOARD, file_schema or SCHEMA_BASE_BOARD) | ||||
|         super(FileConfiguration, self).__init__() | ||||
|  | ||||
|         self._name: str = name | ||||
|         self.object_path: str = f"{DBUS_OBJECT_HAOS_BOARDS}/{name}" | ||||
|   | ||||
| @@ -1,5 +1,9 @@ | ||||
| """Supervised board management.""" | ||||
|  | ||||
| from typing import Any | ||||
|  | ||||
| from supervisor.dbus.utils import dbus_connected | ||||
|  | ||||
| from .const import BOARD_NAME_SUPERVISED | ||||
| from .interface import BoardProxy | ||||
|  | ||||
| @@ -11,3 +15,11 @@ class Supervised(BoardProxy): | ||||
|         """Initialize properties.""" | ||||
|         super().__init__(BOARD_NAME_SUPERVISED) | ||||
|         self.sync_properties: bool = False | ||||
|  | ||||
|     @dbus_connected | ||||
|     async def update(self, changed: dict[str, Any] | None = None) -> None: | ||||
|         """Do nothing as there are no properties. | ||||
|  | ||||
|         Currently unused; we skip the Properties interface to avoid a bug in | ||||
|         Go D-Bus, see: https://github.com/home-assistant/os-agent/issues/206 | ||||
|         """ | ||||
|   | ||||
							
								
								
									
supervisor/dbus/agent/boards/validate.py (new file, 32 lines)
| @@ -0,0 +1,32 @@ | ||||
| """Validation for board config.""" | ||||
|  | ||||
| import voluptuous as vol | ||||
|  | ||||
| from ....const import ( | ||||
|     ATTR_ACTIVITY_LED, | ||||
|     ATTR_DISK_LED, | ||||
|     ATTR_HEARTBEAT_LED, | ||||
|     ATTR_POWER_LED, | ||||
|     ATTR_USER_LED, | ||||
| ) | ||||
|  | ||||
| # pylint: disable=no-value-for-parameter | ||||
| SCHEMA_BASE_BOARD = vol.Schema({}, extra=vol.REMOVE_EXTRA) | ||||
|  | ||||
| SCHEMA_GREEN_BOARD = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_ACTIVITY_LED, default=True): vol.Boolean(), | ||||
|         vol.Optional(ATTR_POWER_LED, default=True): vol.Boolean(), | ||||
|         vol.Optional(ATTR_USER_LED, default=True): vol.Boolean(), | ||||
|     }, | ||||
|     extra=vol.REMOVE_EXTRA, | ||||
| ) | ||||
|  | ||||
| SCHEMA_YELLOW_BOARD = vol.Schema( | ||||
|     { | ||||
|         vol.Optional(ATTR_DISK_LED, default=True): vol.Boolean(), | ||||
|         vol.Optional(ATTR_HEARTBEAT_LED, default=True): vol.Boolean(), | ||||
|         vol.Optional(ATTR_POWER_LED, default=True): vol.Boolean(), | ||||
|     }, | ||||
|     extra=vol.REMOVE_EXTRA, | ||||
| ) | ||||
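The schemas above do two things at validation time: fill in `default=True` for missing LED keys and drop unknown keys via `extra=vol.REMOVE_EXTRA`. A quick demonstration of that behavior (key names shortened to plain strings):

```python
import voluptuous as vol

SCHEMA = vol.Schema(
    {
        vol.Optional("power_led", default=True): vol.Boolean(),
        vol.Optional("disk_led", default=True): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)

# Missing keys are filled from their defaults:
print(SCHEMA({}))  # {'power_led': True, 'disk_led': True}

# Boolean() coerces common string forms, and unknown keys are silently dropped:
print(SCHEMA({"power_led": "off", "bogus": 1}))  # {'power_led': False, 'disk_led': True}
```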
| @@ -1,11 +1,16 @@ | ||||
| """Yellow board management.""" | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import Awaitable | ||||
|  | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ....const import ATTR_DISK_LED, ATTR_HEARTBEAT_LED, ATTR_POWER_LED | ||||
| from ...const import DBUS_ATTR_DISK_LED, DBUS_ATTR_HEARTBEAT_LED, DBUS_ATTR_POWER_LED | ||||
| from ...interface import dbus_property | ||||
| from .const import BOARD_NAME_YELLOW | ||||
| from .interface import BoardProxy | ||||
| from .validate import SCHEMA_YELLOW_BOARD | ||||
|  | ||||
|  | ||||
| class Yellow(BoardProxy): | ||||
| @@ -13,7 +18,7 @@ class Yellow(BoardProxy): | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize properties.""" | ||||
|         super().__init__(BOARD_NAME_YELLOW) | ||||
|         super().__init__(BOARD_NAME_YELLOW, SCHEMA_YELLOW_BOARD) | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
| @@ -21,10 +26,10 @@ class Yellow(BoardProxy): | ||||
|         """Get heartbeat LED enabled.""" | ||||
|         return self.properties[DBUS_ATTR_HEARTBEAT_LED] | ||||
|  | ||||
|     @heartbeat_led.setter | ||||
|     def heartbeat_led(self, enabled: bool) -> None: | ||||
|     def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]: | ||||
|         """Enable/disable heartbeat LED.""" | ||||
|         asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled)) | ||||
|         self._data[ATTR_HEARTBEAT_LED] = enabled | ||||
|         return self.dbus.Boards.Yellow.set_heartbeat_led(enabled) | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
| @@ -32,10 +37,10 @@ class Yellow(BoardProxy): | ||||
|         """Get power LED enabled.""" | ||||
|         return self.properties[DBUS_ATTR_POWER_LED] | ||||
|  | ||||
|     @power_led.setter | ||||
|     def power_led(self, enabled: bool) -> None: | ||||
|     def set_power_led(self, enabled: bool) -> Awaitable[None]: | ||||
|         """Enable/disable power LED.""" | ||||
|         asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled)) | ||||
|         self._data[ATTR_POWER_LED] = enabled | ||||
|         return self.dbus.Boards.Yellow.set_power_led(enabled) | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
| @@ -43,7 +48,18 @@ class Yellow(BoardProxy): | ||||
|         """Get disk LED enabled.""" | ||||
|         return self.properties[DBUS_ATTR_DISK_LED] | ||||
|  | ||||
|     @disk_led.setter | ||||
|     def disk_led(self, enabled: bool) -> None: | ||||
|     def set_disk_led(self, enabled: bool) -> Awaitable[None]: | ||||
|         """Enable/disable disk LED.""" | ||||
|         asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled)) | ||||
|         self._data[ATTR_DISK_LED] = enabled | ||||
|         return self.dbus.Boards.Yellow.set_disk_led(enabled) | ||||
|  | ||||
|     async def connect(self, bus: MessageBus) -> None: | ||||
|         """Connect to D-Bus.""" | ||||
|         await super().connect(bus) | ||||
|  | ||||
|         # Set LEDs based on settings on connect | ||||
|         await asyncio.gather( | ||||
|             self.set_disk_led(self._data[ATTR_DISK_LED]), | ||||
|             self.set_heartbeat_led(self._data[ATTR_HEARTBEAT_LED]), | ||||
|             self.set_power_led(self._data[ATTR_POWER_LED]), | ||||
|         ) | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """DataDisk object for OS-Agent.""" | ||||
|  | ||||
| from pathlib import Path | ||||
|  | ||||
| from ..const import ( | ||||
|   | ||||
| @@ -12,6 +12,6 @@ class System(DBusInterface): | ||||
|     object_path: str = DBUS_OBJECT_HAOS_SYSTEM | ||||
|  | ||||
|     @dbus_connected | ||||
|     async def schedule_wipe_device(self) -> None: | ||||
|     async def schedule_wipe_device(self) -> bool: | ||||
|         """Schedule a factory reset on next system boot.""" | ||||
|         await self.dbus.System.call_schedule_wipe_device() | ||||
|         return await self.dbus.System.call_schedule_wipe_device() | ||||
|   | ||||
| @@ -1,5 +1,6 @@ | ||||
| """Constants for DBUS.""" | ||||
| from enum import Enum, IntEnum | ||||
|  | ||||
| from enum import IntEnum, StrEnum | ||||
| from socket import AF_INET, AF_INET6 | ||||
|  | ||||
| DBUS_NAME_HAOS = "io.hass.os" | ||||
| @@ -36,12 +37,14 @@ DBUS_IFACE_RAUC_INSTALLER = "de.pengutronix.rauc.Installer" | ||||
| DBUS_IFACE_RESOLVED_MANAGER = "org.freedesktop.resolve1.Manager" | ||||
| DBUS_IFACE_SETTINGS_CONNECTION = "org.freedesktop.NetworkManager.Settings.Connection" | ||||
| DBUS_IFACE_SYSTEMD_MANAGER = "org.freedesktop.systemd1.Manager" | ||||
| DBUS_IFACE_SYSTEMD_UNIT = "org.freedesktop.systemd1.Unit" | ||||
| DBUS_IFACE_TIMEDATE = "org.freedesktop.timedate1" | ||||
| DBUS_IFACE_UDISKS2_MANAGER = "org.freedesktop.UDisks2.Manager" | ||||
|  | ||||
| DBUS_SIGNAL_NM_CONNECTION_ACTIVE_CHANGED = ( | ||||
|     "org.freedesktop.NetworkManager.Connection.Active.StateChanged" | ||||
| ) | ||||
| DBUS_SIGNAL_PROPERTIES_CHANGED = "org.freedesktop.DBus.Properties.PropertiesChanged" | ||||
| DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED = "de.pengutronix.rauc.Installer.Completed" | ||||
|  | ||||
| DBUS_OBJECT_BASE = "/" | ||||
| @@ -59,11 +62,14 @@ DBUS_OBJECT_RESOLVED = "/org/freedesktop/resolve1" | ||||
| DBUS_OBJECT_SETTINGS = "/org/freedesktop/NetworkManager/Settings" | ||||
| DBUS_OBJECT_SYSTEMD = "/org/freedesktop/systemd1" | ||||
| DBUS_OBJECT_TIMEDATE = "/org/freedesktop/timedate1" | ||||
| DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager" | ||||
| DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2" | ||||
| DBUS_OBJECT_UDISKS2_MANAGER = "/org/freedesktop/UDisks2/Manager" | ||||
|  | ||||
| DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint" | ||||
| DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection" | ||||
| DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections" | ||||
| DBUS_ATTR_ACTIVE_STATE = "ActiveState" | ||||
| DBUS_ATTR_ACTIVITY_LED = "ActivityLED" | ||||
| DBUS_ATTR_ADDRESS_DATA = "AddressData" | ||||
| DBUS_ATTR_BITRATE = "Bitrate" | ||||
| DBUS_ATTR_BOARD = "Board" | ||||
| @@ -144,6 +150,7 @@ DBUS_ATTR_OPERATION = "Operation" | ||||
| DBUS_ATTR_OPTIONS = "Options" | ||||
| DBUS_ATTR_PARSER_VERSION = "ParserVersion" | ||||
| DBUS_ATTR_PARTITIONS = "Partitions" | ||||
| DBUS_ATTR_PATH = "Path" | ||||
| DBUS_ATTR_POWER_LED = "PowerLED" | ||||
| DBUS_ATTR_PRIMARY_CONNECTION = "PrimaryConnection" | ||||
| DBUS_ATTR_READ_ONLY = "ReadOnly" | ||||
| @@ -168,19 +175,21 @@ DBUS_ATTR_TIMEUSEC = "TimeUSec" | ||||
| DBUS_ATTR_TIMEZONE = "Timezone" | ||||
| DBUS_ATTR_TRANSACTION_STATISTICS = "TransactionStatistics" | ||||
| DBUS_ATTR_TYPE = "Type" | ||||
| DBUS_ATTR_USER_LED = "UserLED" | ||||
| DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC = "UserspaceTimestampMonotonic" | ||||
| DBUS_ATTR_UUID_UPPERCASE = "UUID" | ||||
| DBUS_ATTR_UUID = "Uuid" | ||||
| DBUS_ATTR_VARIANT = "Variant" | ||||
| DBUS_ATTR_VENDOR = "Vendor" | ||||
| DBUS_ATTR_VERSION = "Version" | ||||
| DBUS_ATTR_VIRTUALIZATION = "Virtualization" | ||||
| DBUS_ATTR_WHAT = "What" | ||||
| DBUS_ATTR_WWN = "WWN" | ||||
|  | ||||
| DBUS_ERR_SYSTEMD_NO_SUCH_UNIT = "org.freedesktop.systemd1.NoSuchUnit" | ||||
|  | ||||
|  | ||||
| class RaucState(str, Enum): | ||||
| class RaucState(StrEnum): | ||||
|     """Rauc slot states.""" | ||||
|  | ||||
|     GOOD = "good" | ||||
| @@ -188,7 +197,7 @@ class RaucState(str, Enum): | ||||
|     ACTIVE = "active" | ||||
|  | ||||
|  | ||||
| class InterfaceMethod(str, Enum): | ||||
| class InterfaceMethod(StrEnum): | ||||
|     """Interface method simple.""" | ||||
|  | ||||
|     AUTO = "auto" | ||||
| @@ -197,14 +206,14 @@ class InterfaceMethod(str, Enum): | ||||
|     LINK_LOCAL = "link-local" | ||||
|  | ||||
|  | ||||
| class ConnectionType(str, Enum): | ||||
| class ConnectionType(StrEnum): | ||||
|     """Connection type.""" | ||||
|  | ||||
|     ETHERNET = "802-3-ethernet" | ||||
|     WIRELESS = "802-11-wireless" | ||||
|  | ||||
|  | ||||
| class ConnectionStateType(int, Enum): | ||||
| class ConnectionStateType(IntEnum): | ||||
|     """Connection states. | ||||
|  | ||||
|     https://developer.gnome.org/NetworkManager/stable/nm-dbus-types.html#NMActiveConnectionState | ||||
| @@ -217,7 +226,7 @@ class ConnectionStateType(int, Enum): | ||||
|     DEACTIVATED = 4 | ||||
|  | ||||
|  | ||||
| class ConnectionStateFlags(int, Enum): | ||||
| class ConnectionStateFlags(IntEnum): | ||||
|     """Connection state flags. | ||||
|  | ||||
|     https://developer-old.gnome.org/NetworkManager/stable/nm-dbus-types.html#NMActivationStateFlags | ||||
| @@ -234,7 +243,7 @@ class ConnectionStateFlags(int, Enum): | ||||
|     EXTERNAL = 0x80 | ||||
|  | ||||
|  | ||||
| class ConnectivityState(int, Enum): | ||||
| class ConnectivityState(IntEnum): | ||||
|     """Network connectvity. | ||||
|  | ||||
|     https://developer.gnome.org/NetworkManager/unstable/nm-dbus-types.html#NMConnectivityState | ||||
| @@ -247,7 +256,7 @@ class ConnectivityState(int, Enum): | ||||
|     CONNECTIVITY_FULL = 4 | ||||
|  | ||||
|  | ||||
| class DeviceType(int, Enum): | ||||
| class DeviceType(IntEnum): | ||||
|     """Device types. | ||||
|  | ||||
|     https://developer.gnome.org/NetworkManager/stable/nm-dbus-types.html#NMDeviceType | ||||
| @@ -262,7 +271,7 @@ class DeviceType(int, Enum): | ||||
|     VETH = 20 | ||||
|  | ||||
|  | ||||
| class WirelessMethodType(int, Enum): | ||||
| class WirelessMethodType(IntEnum): | ||||
|     """Device Type.""" | ||||
|  | ||||
|     UNKNOWN = 0 | ||||
| @@ -279,7 +288,7 @@ class DNSAddressFamily(IntEnum): | ||||
|     INET6 = AF_INET6 | ||||
|  | ||||
|  | ||||
| class MulticastProtocolEnabled(str, Enum): | ||||
| class MulticastProtocolEnabled(StrEnum): | ||||
|     """Multicast protocol enabled or resolve.""" | ||||
|  | ||||
|     YES = "yes" | ||||
| @@ -287,7 +296,7 @@ class MulticastProtocolEnabled(str, Enum): | ||||
|     RESOLVE = "resolve" | ||||
|  | ||||
|  | ||||
| class DNSOverTLSEnabled(str, Enum): | ||||
| class DNSOverTLSEnabled(StrEnum): | ||||
|     """DNS over TLS enabled.""" | ||||
|  | ||||
|     YES = "yes" | ||||
| @@ -295,7 +304,7 @@ class DNSOverTLSEnabled(str, Enum): | ||||
|     OPPORTUNISTIC = "opportunistic" | ||||
|  | ||||
|  | ||||
| class DNSSECValidation(str, Enum): | ||||
| class DNSSECValidation(StrEnum): | ||||
|     """DNSSEC validation enforced.""" | ||||
|  | ||||
|     YES = "yes" | ||||
| @@ -303,7 +312,7 @@ class DNSSECValidation(str, Enum): | ||||
|     ALLOW_DOWNGRADE = "allow-downgrade" | ||||
|  | ||||
|  | ||||
| class DNSStubListenerEnabled(str, Enum): | ||||
| class DNSStubListenerEnabled(StrEnum): | ||||
|     """DNS stub listener enabled.""" | ||||
|  | ||||
|     YES = "yes" | ||||
| @@ -312,7 +321,7 @@ class DNSStubListenerEnabled(str, Enum): | ||||
|     UDP_ONLY = "udp" | ||||
|  | ||||
|  | ||||
| class ResolvConfMode(str, Enum): | ||||
| class ResolvConfMode(StrEnum): | ||||
|     """Resolv.conf management mode.""" | ||||
|  | ||||
|     FOREIGN = "foreign" | ||||
| @@ -322,7 +331,7 @@ class ResolvConfMode(str, Enum): | ||||
|     UPLINK = "uplink" | ||||
|  | ||||
|  | ||||
| class StopUnitMode(str, Enum): | ||||
| class StopUnitMode(StrEnum): | ||||
|     """Mode for stopping the unit.""" | ||||
|  | ||||
|     REPLACE = "replace" | ||||
| @@ -331,7 +340,7 @@ class StopUnitMode(str, Enum): | ||||
|     IGNORE_REQUIREMENTS = "ignore-requirements" | ||||
|  | ||||
|  | ||||
| class StartUnitMode(str, Enum): | ||||
| class StartUnitMode(StrEnum): | ||||
|     """Mode for starting the unit.""" | ||||
|  | ||||
|     REPLACE = "replace" | ||||
| @@ -341,7 +350,7 @@ class StartUnitMode(str, Enum): | ||||
|     ISOLATE = "isolate" | ||||
|  | ||||
|  | ||||
| class UnitActiveState(str, Enum): | ||||
| class UnitActiveState(StrEnum): | ||||
|     """Active state of a systemd unit.""" | ||||
|  | ||||
|     ACTIVE = "active" | ||||
|   | ||||
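The wholesale `str, Enum` to `StrEnum` migration in this file is not purely cosmetic: under Python 3.11, `str()` and f-strings render a `str`-mixin enum member as `ClassName.MEMBER`, whereas a `StrEnum` member renders as its value. A quick comparison:

```python
from enum import Enum, StrEnum  # StrEnum requires Python 3.11+


class OldState(str, Enum):
    GOOD = "good"


class NewState(StrEnum):
    GOOD = "good"


print(f"{OldState.GOOD}")  # OldState.GOOD  (member repr, not the value)
print(f"{NewState.GOOD}")  # good           (the member *is* its value)

# Comparisons against plain strings still work either way:
print(OldState.GOOD == "good", NewState.GOOD == "good")  # True True
```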
| @@ -1,9 +1,10 @@ | ||||
| """D-Bus interface for hostname.""" | ||||
|  | ||||
| import logging | ||||
|  | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ..exceptions import DBusError, DBusInterfaceError | ||||
| from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||
| from .const import ( | ||||
|     DBUS_ATTR_CHASSIS, | ||||
|     DBUS_ATTR_DEPLOYMENT, | ||||
| @@ -39,7 +40,7 @@ class Hostname(DBusInterfaceProxy): | ||||
|             await super().connect(bus) | ||||
|         except DBusError: | ||||
|             _LOGGER.warning("Can't connect to systemd-hostname") | ||||
|         except DBusInterfaceError: | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError): | ||||
|             _LOGGER.warning( | ||||
|                 "No hostname support on the host. Hostname functions have been disabled." | ||||
|             ) | ||||
|   | ||||
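This connect pattern, now also catching `DBusServiceUnkownError`, repeats across the hostname, logind, DnsManager, and Network Manager wrappers: a missing host service downgrades the feature instead of failing startup. A self-contained sketch with flat stand-in exception classes (the real hierarchy lives in supervisor/exceptions.py, where the identifier really is spelled `Unkown`):

```python
import asyncio
import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)


# Flat stand-ins; the real classes live in supervisor/exceptions.py.
class DBusError(Exception): ...
class DBusInterfaceError(Exception): ...
class DBusServiceUnkownError(Exception): ...


class MissingService:
    name = "systemd-hostname"

    async def connect(self, bus) -> None:
        raise DBusServiceUnkownError()


async def connect_optional(wrapper, bus) -> None:
    """Connect, but degrade gracefully when the host lacks the service."""
    try:
        await wrapper.connect(bus)
    except DBusError:
        _LOGGER.warning("Can't connect to %s", wrapper.name)
    except (DBusServiceUnkownError, DBusInterfaceError):
        _LOGGER.warning("No %s support on the host, disabling feature", wrapper.name)


asyncio.run(connect_optional(MissingService(), bus=None))
```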
| @@ -1,4 +1,5 @@ | ||||
| """Interface class for D-Bus wrappers.""" | ||||
|  | ||||
| from abc import ABC | ||||
| from collections.abc import Callable | ||||
| from functools import wraps | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| """Interface to Logind over D-Bus.""" | ||||
|  | ||||
| import logging | ||||
|  | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ..exceptions import DBusError, DBusInterfaceError | ||||
| from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||
| from .const import DBUS_NAME_LOGIND, DBUS_OBJECT_LOGIND | ||||
| from .interface import DBusInterface | ||||
| from .utils import dbus_connected | ||||
| @@ -28,8 +29,8 @@ class Logind(DBusInterface): | ||||
|             await super().connect(bus) | ||||
|         except DBusError: | ||||
|             _LOGGER.warning("Can't connect to systemd-logind") | ||||
|         except DBusInterfaceError: | ||||
|             _LOGGER.info("No systemd-logind support on the host.") | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError): | ||||
|             _LOGGER.warning("No systemd-logind support on the host.") | ||||
|  | ||||
|     @dbus_connected | ||||
|     async def reboot(self) -> None: | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """D-Bus interface objects.""" | ||||
|  | ||||
| import asyncio | ||||
| import logging | ||||
|  | ||||
| @@ -17,7 +18,7 @@ from .rauc import Rauc | ||||
| from .resolved import Resolved | ||||
| from .systemd import Systemd | ||||
| from .timedate import TimeDate | ||||
| from .udisks2 import UDisks2 | ||||
| from .udisks2 import UDisks2Manager | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -37,7 +38,7 @@ class DBusManager(CoreSysAttributes): | ||||
|         self._agent: OSAgent = OSAgent() | ||||
|         self._timedate: TimeDate = TimeDate() | ||||
|         self._resolved: Resolved = Resolved() | ||||
|         self._udisks2: UDisks2 = UDisks2() | ||||
|         self._udisks2: UDisks2Manager = UDisks2Manager() | ||||
|         self._bus: MessageBus | None = None | ||||
|  | ||||
|     @property | ||||
| @@ -81,7 +82,7 @@ class DBusManager(CoreSysAttributes): | ||||
|         return self._resolved | ||||
|  | ||||
|     @property | ||||
|     def udisks2(self) -> UDisks2: | ||||
|     def udisks2(self) -> UDisks2Manager: | ||||
|         """Return the udisks2 interface.""" | ||||
|         return self._udisks2 | ||||
|  | ||||
| @@ -128,9 +129,11 @@ class DBusManager(CoreSysAttributes): | ||||
|  | ||||
|         for err in errors: | ||||
|             if err: | ||||
|                 dbus = self.all[errors.index(err)] | ||||
|                 _LOGGER.warning( | ||||
|                     "Can't load dbus interface %s: %s", | ||||
|                     self.all[errors.index(err)].name, | ||||
|                     "Can't load dbus interface %s %s: %s", | ||||
|                     dbus.name, | ||||
|                     dbus.object_path, | ||||
|                     err, | ||||
|                 ) | ||||
|  | ||||
|   | ||||
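The improved log line above resolves the failing interface with `errors.index(err)`, which works because `asyncio.gather(..., return_exceptions=True)` keeps results positionally aligned with its inputs (though `list.index` returns the first equal element, so `zip` expresses the same pairing unambiguously). A sketch of the idea with stub interfaces:

```python
import asyncio


class Iface:
    def __init__(self, name: str, object_path: str, fail: bool) -> None:
        self.name, self.object_path, self.fail = name, object_path, fail

    async def connect(self) -> None:
        if self.fail:
            raise RuntimeError("service not available")


async def load_all(interfaces: list[Iface]) -> None:
    # return_exceptions=True keeps results positionally aligned with inputs.
    errors = await asyncio.gather(
        *(iface.connect() for iface in interfaces), return_exceptions=True
    )
    for dbus, err in zip(interfaces, errors):
        if err:  # successful connects yield None, failures yield the exception
            print(f"Can't load dbus interface {dbus.name} {dbus.object_path}: {err!r}")


asyncio.run(load_all([
    Iface("hostname", "/org/freedesktop/hostname1", fail=True),
    Iface("timedate", "/org/freedesktop/timedate1", fail=False),
]))
```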
| @@ -1,4 +1,5 @@ | ||||
| """Network Manager implementation for DBUS.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| @@ -9,6 +10,8 @@ from ...exceptions import ( | ||||
|     DBusError, | ||||
|     DBusFatalError, | ||||
|     DBusInterfaceError, | ||||
|     DBusNoReplyError, | ||||
|     DBusServiceUnkownError, | ||||
|     HostNotSupportedError, | ||||
|     NetworkInterfaceNotFound, | ||||
| ) | ||||
| @@ -143,7 +146,7 @@ class NetworkManager(DBusInterfaceProxy): | ||||
|             await self.settings.connect(bus) | ||||
|         except DBusError: | ||||
|             _LOGGER.warning("Can't connect to Network Manager") | ||||
|         except DBusInterfaceError: | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError): | ||||
|             _LOGGER.warning( | ||||
|                 "No Network Manager support on the host. Local network functions have been disabled." | ||||
|             ) | ||||
| @@ -210,8 +213,22 @@ class NetworkManager(DBusInterfaceProxy): | ||||
|                     # try to query it. Ignore those cases. | ||||
|                     _LOGGER.debug("Can't process %s: %s", device, err) | ||||
|                     continue | ||||
|                 except ( | ||||
|                     DBusNoReplyError, | ||||
|                     DBusServiceUnkownError, | ||||
|                 ) as err: | ||||
|                     # This typically means that NetworkManager disappeared. Give up immediately. | ||||
|                     _LOGGER.error( | ||||
|                         "NetworkManager not responding while processing %s: %s. Giving up.", | ||||
|                         device, | ||||
|                         err, | ||||
|                     ) | ||||
|                     capture_exception(err) | ||||
|                     return | ||||
|                 except Exception as err:  # pylint: disable=broad-except | ||||
|                     _LOGGER.exception("Error while processing %s: %s", device, err) | ||||
|                     _LOGGER.exception( | ||||
|                         "Unkown error while processing %s: %s", device, err | ||||
|                     ) | ||||
|                     capture_exception(err) | ||||
|                     continue | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| """NetworkConnection objects for Network Manager.""" | ||||
|  | ||||
| from dataclasses import dataclass | ||||
| from ipaddress import IPv4Address, IPv6Address | ||||
|  | ||||
| @@ -58,15 +59,26 @@ class VlanProperties: | ||||
|     parent: str | None | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class IpAddress: | ||||
|     """IP address object for Network Manager.""" | ||||
|  | ||||
|     address: str | ||||
|     prefix: int | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class IpProperties: | ||||
|     """IP properties object for Network Manager.""" | ||||
|  | ||||
|     method: str | None | ||||
|     address_data: list[IpAddress] | None | ||||
|     gateway: str | None | ||||
|     dns: list[bytes | int] | None | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class DeviceProperties: | ||||
|     """Device properties object for Network Manager.""" | ||||
| class MatchProperties: | ||||
|     """Match properties object for Network Manager.""" | ||||
|  | ||||
|     match_device: str | None | ||||
|     path: list[str] | None = None | ||||
|   | ||||
| @@ -121,7 +121,7 @@ class NetworkConnection(DBusInterfaceProxy): | ||||
|         self._state_flags = { | ||||
|             flag | ||||
|             for flag in ConnectionStateFlags | ||||
|             if flag.value & self.properties[DBUS_ATTR_STATE_FLAGS] | ||||
|             if flag & self.properties[DBUS_ATTR_STATE_FLAGS] | ||||
|         } or {ConnectionStateFlags.NONE} | ||||
|  | ||||
|         # IPv4 | ||||
|   | ||||
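The changed comprehension relies on `IntEnum` members behaving as plain ints, so `flag & bitmask` no longer needs `.value`. A standalone illustration using a subset of the NetworkManager activation state flags:

```python
from enum import IntEnum


class StateFlags(IntEnum):
    """Subset of NMActivationStateFlags (values per the NM documentation)."""

    NONE = 0
    LAYER2_READY = 0x4
    IP4_READY = 0x8
    IP6_READY = 0x10


raw = 0x4 | 0x10  # bitmask as reported over D-Bus

# IntEnum members act as ints, so `flag & raw` works directly:
flags = {flag for flag in StateFlags if flag & raw} or {StateFlags.NONE}
print(flags)  # e.g. {<StateFlags.LAYER2_READY: 4>, <StateFlags.IP6_READY: 16>}
```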
| @@ -1,4 +1,5 @@ | ||||
| """Network Manager DNS Manager object.""" | ||||
|  | ||||
| from ipaddress import ip_address | ||||
| import logging | ||||
| from typing import Any | ||||
| @@ -12,7 +13,7 @@ from ...const import ( | ||||
|     ATTR_PRIORITY, | ||||
|     ATTR_VPN, | ||||
| ) | ||||
| from ...exceptions import DBusError, DBusInterfaceError | ||||
| from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||
| from ..const import ( | ||||
|     DBUS_ATTR_CONFIGURATION, | ||||
|     DBUS_ATTR_MODE, | ||||
| @@ -67,7 +68,7 @@ class NetworkManagerDNS(DBusInterfaceProxy): | ||||
|             await super().connect(bus) | ||||
|         except DBusError: | ||||
|             _LOGGER.warning("Can't connect to DnsManager") | ||||
|         except DBusInterfaceError: | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError): | ||||
|             _LOGGER.warning( | ||||
|                 "No DnsManager support on the host. Local DNS functions have been disabled." | ||||
|             ) | ||||
|   | ||||
| @@ -11,6 +11,7 @@ from ..const import ( | ||||
|     DBUS_ATTR_DRIVER, | ||||
|     DBUS_ATTR_HWADDRESS, | ||||
|     DBUS_ATTR_MANAGED, | ||||
|     DBUS_ATTR_PATH, | ||||
|     DBUS_IFACE_DEVICE, | ||||
|     DBUS_NAME_NM, | ||||
|     DBUS_OBJECT_BASE, | ||||
| @@ -74,6 +75,12 @@ class NetworkInterface(DBusInterfaceProxy): | ||||
|         """Return hardware address (i.e. mac address) of device.""" | ||||
|         return self.properties[DBUS_ATTR_HWADDRESS] | ||||
|  | ||||
|     @property | ||||
|     @dbus_property | ||||
|     def path(self) -> str: | ||||
|         """Return The path of the device as exposed by the udev property ID_PATH.""" | ||||
|         return self.properties[DBUS_ATTR_PATH] | ||||
|  | ||||
|     @property | ||||
|     def connection(self) -> NetworkConnection | None: | ||||
|         """Return the connection used for this interface.""" | ||||
|   | ||||
| @@ -1,48 +1,72 @@ | ||||
| """Connection object for Network Manager.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from dbus_fast import Variant | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ....const import ATTR_METHOD, ATTR_MODE, ATTR_PSK, ATTR_SSID | ||||
| from ...const import DBUS_NAME_NM | ||||
| from ...interface import DBusInterface | ||||
| from ...utils import dbus_connected | ||||
| from ..configuration import ( | ||||
|     ConnectionProperties, | ||||
|     DeviceProperties, | ||||
|     EthernetProperties, | ||||
|     IpAddress, | ||||
|     IpProperties, | ||||
|     MatchProperties, | ||||
|     VlanProperties, | ||||
|     WirelessProperties, | ||||
|     WirelessSecurityProperties, | ||||
| ) | ||||
|  | ||||
| CONF_ATTR_CONNECTION = "connection" | ||||
| CONF_ATTR_MATCH = "match" | ||||
| CONF_ATTR_802_ETHERNET = "802-3-ethernet" | ||||
| CONF_ATTR_802_WIRELESS = "802-11-wireless" | ||||
| CONF_ATTR_802_WIRELESS_SECURITY = "802-11-wireless-security" | ||||
| CONF_ATTR_VLAN = "vlan" | ||||
| CONF_ATTR_IPV4 = "ipv4" | ||||
| CONF_ATTR_IPV6 = "ipv6" | ||||
| CONF_ATTR_DEVICE = "device" | ||||
|  | ||||
| ATTR_ID = "id" | ||||
| ATTR_UUID = "uuid" | ||||
| ATTR_TYPE = "type" | ||||
| ATTR_PARENT = "parent" | ||||
| ATTR_ASSIGNED_MAC = "assigned-mac-address" | ||||
| ATTR_POWERSAVE = "powersave" | ||||
| ATTR_AUTH_ALG = "auth-alg" | ||||
| ATTR_KEY_MGMT = "key-mgmt" | ||||
| ATTR_INTERFACE_NAME = "interface-name" | ||||
| ATTR_MATCH_DEVICE = "match-device" | ||||
| CONF_ATTR_CONNECTION_ID = "id" | ||||
| CONF_ATTR_CONNECTION_UUID = "uuid" | ||||
| CONF_ATTR_CONNECTION_TYPE = "type" | ||||
| CONF_ATTR_CONNECTION_LLMNR = "llmnr" | ||||
| CONF_ATTR_CONNECTION_MDNS = "mdns" | ||||
| CONF_ATTR_CONNECTION_AUTOCONNECT = "autoconnect" | ||||
| CONF_ATTR_CONNECTION_INTERFACE_NAME = "interface-name" | ||||
|  | ||||
| CONF_ATTR_MATCH_PATH = "path" | ||||
|  | ||||
| CONF_ATTR_VLAN_ID = "id" | ||||
| CONF_ATTR_VLAN_PARENT = "parent" | ||||
|  | ||||
| CONF_ATTR_802_ETHERNET_ASSIGNED_MAC = "assigned-mac-address" | ||||
|  | ||||
| CONF_ATTR_802_WIRELESS_MODE = "mode" | ||||
| CONF_ATTR_802_WIRELESS_ASSIGNED_MAC = "assigned-mac-address" | ||||
| CONF_ATTR_802_WIRELESS_SSID = "ssid" | ||||
| CONF_ATTR_802_WIRELESS_POWERSAVE = "powersave" | ||||
| CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG = "auth-alg" | ||||
| CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT = "key-mgmt" | ||||
| CONF_ATTR_802_WIRELESS_SECURITY_PSK = "psk" | ||||
|  | ||||
| CONF_ATTR_IPV4_METHOD = "method" | ||||
| CONF_ATTR_IPV4_ADDRESS_DATA = "address-data" | ||||
| CONF_ATTR_IPV4_GATEWAY = "gateway" | ||||
| CONF_ATTR_IPV4_DNS = "dns" | ||||
|  | ||||
| CONF_ATTR_IPV6_METHOD = "method" | ||||
| CONF_ATTR_IPV6_ADDRESS_DATA = "address-data" | ||||
| CONF_ATTR_IPV6_GATEWAY = "gateway" | ||||
| CONF_ATTR_IPV6_DNS = "dns" | ||||
|  | ||||
| IPV4_6_IGNORE_FIELDS = [ | ||||
|     "addresses", | ||||
|     "address-data", | ||||
|     "dns", | ||||
|     "dns-data", | ||||
|     "gateway", | ||||
|     "method", | ||||
| ] | ||||
| @@ -72,7 +96,7 @@ def _merge_settings_attribute( | ||||
| class NetworkSetting(DBusInterface): | ||||
|     """Network connection setting object for Network Manager. | ||||
|  | ||||
|     https://developer.gnome.org/NetworkManager/stable/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html | ||||
|     https://networkmanager.dev/docs/api/1.48.0/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html | ||||
|     """ | ||||
|  | ||||
|     bus_name: str = DBUS_NAME_NM | ||||
| @@ -88,7 +112,7 @@ class NetworkSetting(DBusInterface): | ||||
|         self._vlan: VlanProperties | None = None | ||||
|         self._ipv4: IpProperties | None = None | ||||
|         self._ipv6: IpProperties | None = None | ||||
|         self._device: DeviceProperties | None = None | ||||
|         self._match: MatchProperties | None = None | ||||
|  | ||||
|     @property | ||||
|     def connection(self) -> ConnectionProperties | None: | ||||
| @@ -126,9 +150,9 @@ class NetworkSetting(DBusInterface): | ||||
|         return self._ipv6 | ||||
|  | ||||
|     @property | ||||
|     def device(self) -> DeviceProperties | None: | ||||
|         """Return device properties if any.""" | ||||
|         return self._device | ||||
|     def match(self) -> MatchProperties | None: | ||||
|         """Return match properties if any.""" | ||||
|         return self._match | ||||
|  | ||||
|     @dbus_connected | ||||
|     async def get_settings(self) -> dict[str, Any]: | ||||
| @@ -146,7 +170,7 @@ class NetworkSetting(DBusInterface): | ||||
|             new_settings, | ||||
|             settings, | ||||
|             CONF_ATTR_CONNECTION, | ||||
|             ignore_current_value=[ATTR_INTERFACE_NAME], | ||||
|             ignore_current_value=[CONF_ATTR_CONNECTION_INTERFACE_NAME], | ||||
|         ) | ||||
|         _merge_settings_attribute(new_settings, settings, CONF_ATTR_802_ETHERNET) | ||||
|         _merge_settings_attribute(new_settings, settings, CONF_ATTR_802_WIRELESS) | ||||
| @@ -166,7 +190,7 @@ class NetworkSetting(DBusInterface): | ||||
|             CONF_ATTR_IPV6, | ||||
|             ignore_current_value=IPV4_6_IGNORE_FIELDS, | ||||
|         ) | ||||
|         _merge_settings_attribute(new_settings, settings, CONF_ATTR_DEVICE) | ||||
|         _merge_settings_attribute(new_settings, settings, CONF_ATTR_MATCH) | ||||
|  | ||||
|         await self.dbus.Settings.Connection.call_update(new_settings) | ||||
|  | ||||
| @@ -191,49 +215,69 @@ class NetworkSetting(DBusInterface): | ||||
|         # See: https://developer-old.gnome.org/NetworkManager/stable/ch01.html | ||||
|         if CONF_ATTR_CONNECTION in data: | ||||
|             self._connection = ConnectionProperties( | ||||
|                 data[CONF_ATTR_CONNECTION].get(ATTR_ID), | ||||
|                 data[CONF_ATTR_CONNECTION].get(ATTR_UUID), | ||||
|                 data[CONF_ATTR_CONNECTION].get(ATTR_TYPE), | ||||
|                 data[CONF_ATTR_CONNECTION].get(ATTR_INTERFACE_NAME), | ||||
|                 data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_ID), | ||||
|                 data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_UUID), | ||||
|                 data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_TYPE), | ||||
|                 data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_INTERFACE_NAME), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_802_ETHERNET in data: | ||||
|             self._ethernet = EthernetProperties( | ||||
|                 data[CONF_ATTR_802_ETHERNET].get(ATTR_ASSIGNED_MAC), | ||||
|                 data[CONF_ATTR_802_ETHERNET].get(CONF_ATTR_802_ETHERNET_ASSIGNED_MAC), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_802_WIRELESS in data: | ||||
|             self._wireless = WirelessProperties( | ||||
|                 bytes(data[CONF_ATTR_802_WIRELESS].get(ATTR_SSID, [])).decode(), | ||||
|                 data[CONF_ATTR_802_WIRELESS].get(ATTR_ASSIGNED_MAC), | ||||
|                 data[CONF_ATTR_802_WIRELESS].get(ATTR_MODE), | ||||
|                 data[CONF_ATTR_802_WIRELESS].get(ATTR_POWERSAVE), | ||||
|                 bytes( | ||||
|                     data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_SSID, []) | ||||
|                 ).decode(), | ||||
|                 data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_ASSIGNED_MAC), | ||||
|                 data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_MODE), | ||||
|                 data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_POWERSAVE), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_802_WIRELESS_SECURITY in data: | ||||
|             self._wireless_security = WirelessSecurityProperties( | ||||
|                 data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_AUTH_ALG), | ||||
|                 data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_KEY_MGMT), | ||||
|                 data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_PSK), | ||||
|                 data[CONF_ATTR_802_WIRELESS_SECURITY].get( | ||||
|                     CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG | ||||
|                 ), | ||||
|                 data[CONF_ATTR_802_WIRELESS_SECURITY].get( | ||||
|                     CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT | ||||
|                 ), | ||||
|                 data[CONF_ATTR_802_WIRELESS_SECURITY].get( | ||||
|                     CONF_ATTR_802_WIRELESS_SECURITY_PSK | ||||
|                 ), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_VLAN in data: | ||||
|             self._vlan = VlanProperties( | ||||
|                 data[CONF_ATTR_VLAN].get(ATTR_ID), | ||||
|                 data[CONF_ATTR_VLAN].get(ATTR_PARENT), | ||||
|                 data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_ID), | ||||
|                 data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_PARENT), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_IPV4 in data: | ||||
|             address_data = None | ||||
|             if ips := data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_ADDRESS_DATA): | ||||
|                 address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips] | ||||
|             self._ipv4 = IpProperties( | ||||
|                 data[CONF_ATTR_IPV4].get(ATTR_METHOD), | ||||
|                 data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_METHOD), | ||||
|                 address_data, | ||||
|                 data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_GATEWAY), | ||||
|                 data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_DNS), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_IPV6 in data: | ||||
|             address_data = None | ||||
|             if ips := data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_ADDRESS_DATA): | ||||
|                 address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips] | ||||
|             self._ipv6 = IpProperties( | ||||
|                 data[CONF_ATTR_IPV6].get(ATTR_METHOD), | ||||
|                 data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_METHOD), | ||||
|                 address_data, | ||||
|                 data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_GATEWAY), | ||||
|                 data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_DNS), | ||||
|             ) | ||||
|  | ||||
|         if CONF_ATTR_DEVICE in data: | ||||
|             self._device = DeviceProperties( | ||||
|                 data[CONF_ATTR_DEVICE].get(ATTR_MATCH_DEVICE) | ||||
|         if CONF_ATTR_MATCH in data: | ||||
|             self._match = MatchProperties( | ||||
|                 data[CONF_ATTR_MATCH].get(CONF_ATTR_MATCH_PATH) | ||||
|             ) | ||||
|   | ||||
| @@ -1,47 +1,158 @@ | ||||
| """Payload generators for DBUS communication.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import socket | ||||
| from typing import TYPE_CHECKING, Any | ||||
| from typing import TYPE_CHECKING | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from dbus_fast import Variant | ||||
|  | ||||
| from . import ( | ||||
|     ATTR_ASSIGNED_MAC, | ||||
|     ATTR_MATCH_DEVICE, | ||||
|     CONF_ATTR_802_ETHERNET, | ||||
|     CONF_ATTR_802_WIRELESS, | ||||
|     CONF_ATTR_802_WIRELESS_SECURITY, | ||||
|     CONF_ATTR_CONNECTION, | ||||
|     CONF_ATTR_DEVICE, | ||||
|     CONF_ATTR_IPV4, | ||||
|     CONF_ATTR_IPV6, | ||||
|     CONF_ATTR_VLAN, | ||||
| ) | ||||
| from ....host.const import InterfaceMethod, InterfaceType | ||||
| from .. import NetworkManager | ||||
| from . import ( | ||||
|     CONF_ATTR_802_ETHERNET, | ||||
|     CONF_ATTR_802_ETHERNET_ASSIGNED_MAC, | ||||
|     CONF_ATTR_802_WIRELESS, | ||||
|     CONF_ATTR_802_WIRELESS_ASSIGNED_MAC, | ||||
|     CONF_ATTR_802_WIRELESS_MODE, | ||||
|     CONF_ATTR_802_WIRELESS_POWERSAVE, | ||||
|     CONF_ATTR_802_WIRELESS_SECURITY, | ||||
|     CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG, | ||||
|     CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT, | ||||
|     CONF_ATTR_802_WIRELESS_SECURITY_PSK, | ||||
|     CONF_ATTR_802_WIRELESS_SSID, | ||||
|     CONF_ATTR_CONNECTION, | ||||
|     CONF_ATTR_CONNECTION_AUTOCONNECT, | ||||
|     CONF_ATTR_CONNECTION_ID, | ||||
|     CONF_ATTR_CONNECTION_LLMNR, | ||||
|     CONF_ATTR_CONNECTION_MDNS, | ||||
|     CONF_ATTR_CONNECTION_TYPE, | ||||
|     CONF_ATTR_CONNECTION_UUID, | ||||
|     CONF_ATTR_IPV4, | ||||
|     CONF_ATTR_IPV4_ADDRESS_DATA, | ||||
|     CONF_ATTR_IPV4_DNS, | ||||
|     CONF_ATTR_IPV4_GATEWAY, | ||||
|     CONF_ATTR_IPV4_METHOD, | ||||
|     CONF_ATTR_IPV6, | ||||
|     CONF_ATTR_IPV6_ADDRESS_DATA, | ||||
|     CONF_ATTR_IPV6_DNS, | ||||
|     CONF_ATTR_IPV6_GATEWAY, | ||||
|     CONF_ATTR_IPV6_METHOD, | ||||
|     CONF_ATTR_MATCH, | ||||
|     CONF_ATTR_MATCH_PATH, | ||||
|     CONF_ATTR_VLAN, | ||||
|     CONF_ATTR_VLAN_ID, | ||||
|     CONF_ATTR_VLAN_PARENT, | ||||
| ) | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from ....host.configuration import Interface | ||||
|  | ||||
|  | ||||
| def _get_ipv4_connection_settings(ipv4setting) -> dict: | ||||
|     ipv4 = {} | ||||
|     if not ipv4setting or ipv4setting.method == InterfaceMethod.AUTO: | ||||
|         ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "auto") | ||||
|     elif ipv4setting.method == InterfaceMethod.DISABLED: | ||||
|         ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "disabled") | ||||
|     elif ipv4setting.method == InterfaceMethod.STATIC: | ||||
|         ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "manual") | ||||
|  | ||||
|         address_data = [] | ||||
|         for address in ipv4setting.address: | ||||
|             address_data.append( | ||||
|                 { | ||||
|                     "address": Variant("s", str(address.ip)), | ||||
|                     "prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])), | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|         ipv4[CONF_ATTR_IPV4_ADDRESS_DATA] = Variant("aa{sv}", address_data) | ||||
|         if ipv4setting.gateway: | ||||
|             ipv4[CONF_ATTR_IPV4_GATEWAY] = Variant("s", str(ipv4setting.gateway)) | ||||
|     else: | ||||
|         raise RuntimeError("Invalid IPv4 InterfaceMethod") | ||||
|  | ||||
|     if ( | ||||
|         ipv4setting | ||||
|         and ipv4setting.nameservers | ||||
|         and ipv4setting.method | ||||
|         in ( | ||||
|             InterfaceMethod.AUTO, | ||||
|             InterfaceMethod.STATIC, | ||||
|         ) | ||||
|     ): | ||||
|         nameservers = ipv4setting.nameservers if ipv4setting else [] | ||||
|         ipv4[CONF_ATTR_IPV4_DNS] = Variant( | ||||
|             "au", | ||||
|             [socket.htonl(int(ip_address)) for ip_address in nameservers], | ||||
|         ) | ||||
|  | ||||
|     return ipv4 | ||||
|  | ||||
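One non-obvious detail in `_get_ipv4_connection_settings()`: NetworkManager takes IPv4 DNS servers as an array of uint32 (`au`) in network byte order, hence the `socket.htonl(int(ip_address))` conversion; IPv6 DNS servers go as raw 16-byte strings (`aay`), hence `.packed` below. A quick check of what those conversions produce:

```python
import socket
from ipaddress import IPv4Address, IPv6Address

dns4 = IPv4Address("1.2.3.4")
print(hex(int(dns4)))  # 0x1020304 (host-order integer)
print(hex(socket.htonl(int(dns4))))  # 0x4030201 on little-endian hosts

dns6 = IPv6Address("2001:db8::1")
print(dns6.packed.hex())  # 20010db8...0001: the raw bytes for the "aay" field
```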
|  | ||||
| def _get_ipv6_connection_settings(ipv6setting) -> dict: | ||||
|     ipv6 = {} | ||||
|     if not ipv6setting or ipv6setting.method == InterfaceMethod.AUTO: | ||||
|         ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "auto") | ||||
|     elif ipv6setting.method == InterfaceMethod.DISABLED: | ||||
|         ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "link-local") | ||||
|     elif ipv6setting.method == InterfaceMethod.STATIC: | ||||
|         ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "manual") | ||||
|  | ||||
|         address_data = [] | ||||
|         for address in ipv6setting.address: | ||||
|             address_data.append( | ||||
|                 { | ||||
|                     "address": Variant("s", str(address.ip)), | ||||
|                     "prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])), | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|         ipv6[CONF_ATTR_IPV6_ADDRESS_DATA] = Variant("aa{sv}", address_data) | ||||
|         if ipv6setting.gateway: | ||||
|             ipv6[CONF_ATTR_IPV6_GATEWAY] = Variant("s", str(ipv6setting.gateway)) | ||||
|     else: | ||||
|         raise RuntimeError("Invalid IPv6 InterfaceMethod") | ||||
|  | ||||
|     if ( | ||||
|         ipv6setting | ||||
|         and ipv6setting.nameservers | ||||
|         and ipv6setting.method | ||||
|         in ( | ||||
|             InterfaceMethod.AUTO, | ||||
|             InterfaceMethod.STATIC, | ||||
|         ) | ||||
|     ): | ||||
|         nameservers = ipv6setting.nameservers if ipv6setting else [] | ||||
|         ipv6[CONF_ATTR_IPV6_DNS] = Variant( | ||||
|             "aay", | ||||
|             [ip_address.packed for ip_address in nameservers], | ||||
|         ) | ||||
|     return ipv6 | ||||
|  | ||||
|  | ||||
| def get_connection_from_interface( | ||||
|     interface: Interface, name: str | None = None, uuid: str | None = None | ||||
| ) -> Any: | ||||
|     interface: Interface, | ||||
|     network_manager: NetworkManager, | ||||
|     name: str | None = None, | ||||
|     uuid: str | None = None, | ||||
| ) -> dict[str, dict[str, Variant]]: | ||||
|     """Generate message argument for network interface update.""" | ||||
|  | ||||
|     # Generate/Update ID/name | ||||
|     if not name or not name.startswith("Supervisor"): | ||||
|         name = f"Supervisor {interface.name}" | ||||
|     if interface.type == InterfaceType.VLAN: | ||||
|         name = f"{name}.{interface.vlan.id}" | ||||
|         if interface.type == InterfaceType.VLAN: | ||||
|             name = f"{name}.{interface.vlan.id}" | ||||
|  | ||||
|     if interface.type == InterfaceType.ETHERNET: | ||||
|         iftype = "802-3-ethernet" | ||||
|     elif interface.type == InterfaceType.WIRELESS: | ||||
|         iftype = "802-11-wireless" | ||||
|     else: | ||||
|         iftype = interface.type.value | ||||
|         iftype = interface.type | ||||
|  | ||||
|     # Generate UUID | ||||
|     if not uuid: | ||||
| @@ -49,104 +160,77 @@ def get_connection_from_interface( | ||||
|  | ||||
|     conn: dict[str, dict[str, Variant]] = { | ||||
|         CONF_ATTR_CONNECTION: { | ||||
|             "id": Variant("s", name), | ||||
|             "type": Variant("s", iftype), | ||||
|             "uuid": Variant("s", uuid), | ||||
|             "llmnr": Variant("i", 2), | ||||
|             "mdns": Variant("i", 2), | ||||
|             "autoconnect": Variant("b", True), | ||||
|             CONF_ATTR_CONNECTION_ID: Variant("s", name), | ||||
|             CONF_ATTR_CONNECTION_UUID: Variant("s", uuid), | ||||
|             CONF_ATTR_CONNECTION_TYPE: Variant("s", iftype), | ||||
|             CONF_ATTR_CONNECTION_LLMNR: Variant("i", 2), | ||||
|             CONF_ATTR_CONNECTION_MDNS: Variant("i", 2), | ||||
|             CONF_ATTR_CONNECTION_AUTOCONNECT: Variant("b", True), | ||||
|         }, | ||||
|     } | ||||
|  | ||||
|     if interface.type != InterfaceType.VLAN: | ||||
|         conn[CONF_ATTR_DEVICE] = { | ||||
|             ATTR_MATCH_DEVICE: Variant( | ||||
|                 "s", f"mac:{interface.mac},interface-name:{interface.name}" | ||||
|             ) | ||||
|         } | ||||
|         if interface.path: | ||||
|             conn[CONF_ATTR_MATCH] = { | ||||
|                 CONF_ATTR_MATCH_PATH: Variant("as", [interface.path]) | ||||
|             } | ||||
|         else: | ||||
|             conn[CONF_ATTR_CONNECTION]["interface-name"] = Variant("s", interface.name) | ||||
|  | ||||
|     ipv4 = {} | ||||
|     if not interface.ipv4 or interface.ipv4.method == InterfaceMethod.AUTO: | ||||
|         ipv4["method"] = Variant("s", "auto") | ||||
|     elif interface.ipv4.method == InterfaceMethod.DISABLED: | ||||
|         ipv4["method"] = Variant("s", "disabled") | ||||
|     else: | ||||
|         ipv4["method"] = Variant("s", "manual") | ||||
|         ipv4["dns"] = Variant( | ||||
|             "au", | ||||
|             [ | ||||
|                 socket.htonl(int(ip_address)) | ||||
|                 for ip_address in interface.ipv4.nameservers | ||||
|             ], | ||||
|         ) | ||||
|     conn[CONF_ATTR_IPV4] = _get_ipv4_connection_settings(interface.ipv4setting) | ||||
|  | ||||
|         adressdata = [] | ||||
|         for address in interface.ipv4.address: | ||||
|             adressdata.append( | ||||
|                 { | ||||
|                     "address": Variant("s", str(address.ip)), | ||||
|                     "prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])), | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|         ipv4["address-data"] = Variant("aa{sv}", adressdata) | ||||
|         ipv4["gateway"] = Variant("s", str(interface.ipv4.gateway)) | ||||
|  | ||||
|     conn[CONF_ATTR_IPV4] = ipv4 | ||||
|  | ||||
|     ipv6 = {} | ||||
|     if not interface.ipv6 or interface.ipv6.method == InterfaceMethod.AUTO: | ||||
|         ipv6["method"] = Variant("s", "auto") | ||||
|     elif interface.ipv6.method == InterfaceMethod.DISABLED: | ||||
|         ipv6["method"] = Variant("s", "link-local") | ||||
|     else: | ||||
|         ipv6["method"] = Variant("s", "manual") | ||||
|         ipv6["dns"] = Variant( | ||||
|             "aay", [ip_address.packed for ip_address in interface.ipv6.nameservers] | ||||
|         ) | ||||
|  | ||||
|         adressdata = [] | ||||
|         for address in interface.ipv6.address: | ||||
|             adressdata.append( | ||||
|                 { | ||||
|                     "address": Variant("s", str(address.ip)), | ||||
|                     "prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])), | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|         ipv6["address-data"] = Variant("aa{sv}", adressdata) | ||||
|         ipv6["gateway"] = Variant("s", str(interface.ipv6.gateway)) | ||||
|  | ||||
|     conn[CONF_ATTR_IPV6] = ipv6 | ||||
|     conn[CONF_ATTR_IPV6] = _get_ipv6_connection_settings(interface.ipv6setting) | ||||
|  | ||||
|     if interface.type == InterfaceType.ETHERNET: | ||||
|         conn[CONF_ATTR_802_ETHERNET] = {ATTR_ASSIGNED_MAC: Variant("s", "preserve")} | ||||
|         conn[CONF_ATTR_802_ETHERNET] = { | ||||
|             CONF_ATTR_802_ETHERNET_ASSIGNED_MAC: Variant("s", "preserve") | ||||
|         } | ||||
|     elif interface.type == "vlan": | ||||
|         parent = interface.vlan.interface | ||||
|         if parent in network_manager and ( | ||||
|             parent_connection := network_manager.get(parent).connection | ||||
|         ): | ||||
|             parent = parent_connection.uuid | ||||
|  | ||||
|         conn[CONF_ATTR_VLAN] = { | ||||
|             "id": Variant("u", interface.vlan.id), | ||||
|             "parent": Variant("s", interface.vlan.interface), | ||||
|             CONF_ATTR_VLAN_ID: Variant("u", interface.vlan.id), | ||||
|             CONF_ATTR_VLAN_PARENT: Variant("s", parent), | ||||
|         } | ||||
|     elif interface.type == InterfaceType.WIRELESS: | ||||
|         wireless = { | ||||
|             ATTR_ASSIGNED_MAC: Variant("s", "preserve"), | ||||
|             "ssid": Variant("ay", interface.wifi.ssid.encode("UTF-8")), | ||||
|             "mode": Variant("s", "infrastructure"), | ||||
|             "powersave": Variant("i", 1), | ||||
|             CONF_ATTR_802_WIRELESS_ASSIGNED_MAC: Variant("s", "preserve"), | ||||
|             CONF_ATTR_802_WIRELESS_MODE: Variant("s", "infrastructure"), | ||||
|             CONF_ATTR_802_WIRELESS_POWERSAVE: Variant("i", 1), | ||||
|         } | ||||
|         if interface.wifi and interface.wifi.ssid: | ||||
|             wireless[CONF_ATTR_802_WIRELESS_SSID] = Variant( | ||||
|                 "ay", interface.wifi.ssid.encode("UTF-8") | ||||
|             ) | ||||
|  | ||||
|         conn[CONF_ATTR_802_WIRELESS] = wireless | ||||
|  | ||||
|         if interface.wifi.auth != "open": | ||||
|         if interface.wifi and interface.wifi.auth != "open": | ||||
|             wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY) | ||||
|             wireless_security = {} | ||||
|             if interface.wifi.auth == "wep": | ||||
|                 wireless_security["auth-alg"] = Variant("s", "none") | ||||
|                 wireless_security["key-mgmt"] = Variant("s", "open") | ||||
|                 wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant( | ||||
|                     "s", "open" | ||||
|                 ) | ||||
|                 wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant( | ||||
|                     "s", "none" | ||||
|                 ) | ||||
|             elif interface.wifi.auth == "wpa-psk": | ||||
|                 wireless_security["auth-alg"] = Variant("s", "open") | ||||
|                 wireless_security["key-mgmt"] = Variant("s", "wpa-psk") | ||||
|                 wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant( | ||||
|                     "s", "open" | ||||
|                 ) | ||||
|                 wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant( | ||||
|                     "s", "wpa-psk" | ||||
|                 ) | ||||
|  | ||||
|             if interface.wifi.psk: | ||||
|                 wireless_security["psk"] = Variant("s", interface.wifi.psk) | ||||
|                 wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_PSK] = Variant( | ||||
|                     "s", interface.wifi.psk | ||||
|                 ) | ||||
|             conn[CONF_ATTR_802_WIRELESS_SECURITY] = wireless_security | ||||
|  | ||||
|     return conn | ||||
|   | ||||
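For orientation, this is roughly the payload `get_connection_from_interface()` now emits for a DHCP ethernet interface that exposes a udev path. All values are illustrative: the uuid is generated with `uuid4()` in the real code, and the `match`/`path` value is a made-up ID_PATH.

```python
from dbus_fast import Variant

# Illustrative shape only; see the generator above for the real construction.
conn = {
    "connection": {
        "id": Variant("s", "Supervisor eth0"),
        "uuid": Variant("s", "f62bf7a2-..."),  # uuid4() in the real code
        "type": Variant("s", "802-3-ethernet"),
        "llmnr": Variant("i", 2),
        "mdns": Variant("i", 2),
        "autoconnect": Variant("b", True),
    },
    "match": {"path": Variant("as", ["platform-fd500000.pcie-pci-0000:01:00.0"])},
    "ipv4": {"method": Variant("s", "auto")},
    "ipv6": {"method": Variant("s", "auto")},
    "802-3-ethernet": {"assigned-mac-address": Variant("s", "preserve")},
}
```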
| @@ -1,10 +1,11 @@ | ||||
| """Network Manager implementation for DBUS.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from dbus_fast.aio.message_bus import MessageBus | ||||
|  | ||||
| from ...exceptions import DBusError, DBusInterfaceError | ||||
| from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||
| from ..const import DBUS_NAME_NM, DBUS_OBJECT_SETTINGS | ||||
| from ..interface import DBusInterface | ||||
| from ..network.setting import NetworkSetting | ||||
| @@ -28,7 +29,7 @@ class NetworkManagerSettings(DBusInterface): | ||||
|             await super().connect(bus) | ||||
|         except DBusError: | ||||
|             _LOGGER.warning("Can't connect to Network Manager Settings") | ||||
|         except DBusInterfaceError: | ||||
|         except (DBusServiceUnkownError, DBusInterfaceError): | ||||
|             _LOGGER.warning( | ||||
|                 "No Network Manager Settings support on the host. Local network functions have been disabled." | ||||
|             ) | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff