Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-10-31 14:39:30 +00:00)

Compare commits: 2023.11.4...need-updat (285 commits)
	
| Author | SHA1 |
|---|---|
|  | b7c53d9e40 |
| dependabot[bot] | b684c8673e |
| dependabot[bot] | 547f42439d |
| dependabot[bot] | c51ceb000f |
| dependabot[bot] | 4cbede1bc8 |
| dependabot[bot] | 5eac8c7780 |
|  | ab78d87304 |
| dependabot[bot] | 09166e3867 |
| dependabot[bot] | 8a5c813cdd |
| dependabot[bot] | 4200622f43 |
|  | c4452a85b4 |
|  | e57de4a3c1 |
| dependabot[bot] | 9fd2c91c55 |
| dependabot[bot] | fbd70013a8 |
| dependabot[bot] | 8d18f3e66e |
|  | 5f5754e860 |
| dependabot[bot] | 974c882b9a |
| dependabot[bot] | a9ea90096b |
| dependabot[bot] | 45c72c426e |
| dependabot[bot] | 4e5b75fe19 |
| dependabot[bot] | 3cd617e68f |
| dependabot[bot] | ddff02f73b |
| dependabot[bot] | b59347b3d3 |
| dependabot[bot] | 1dc769076f |
|  | f150a19c0f |
| dependabot[bot] | c4bc1e3824 |
|  | eca99b69db |
| dependabot[bot] | 043af72847 |
| dependabot[bot] | 05c7b6c639 |
| dependabot[bot] | 3385c99f1f |
| dependabot[bot] | 895117f857 |
| dependabot[bot] | 9e3135e2de |
|  | 9a1c517437 |
|  | c0c0c4b7ad |
| dependabot[bot] | be6e39fed0 |
| dependabot[bot] | b384921ee0 |
| dependabot[bot] | 0d05a6eae3 |
| dependabot[bot] | 430aef68c6 |
|  | eac6070e12 |
| dependabot[bot] | 6693b7c2e6 |
| dependabot[bot] | 7898c3e433 |
| dependabot[bot] | 420ecd064e |
| dependabot[bot] | 4289be53f8 |
| dependabot[bot] | 29b41b564e |
| dependabot[bot] | 998eb69583 |
|  | 8ebc097ff4 |
|  | c05984ca49 |
| dependabot[bot] | 1a700c3013 |
| dependabot[bot] | a9c92cdec8 |
| dependabot[bot] | da8b938d5b |
| dependabot[bot] | 71e91328f1 |
| dependabot[bot] | 6356be4c52 |
| dependabot[bot] | e26e5440b6 |
| dependabot[bot] | fecfbd1a3e |
| dependabot[bot] | c00d6dfc76 |
| dependabot[bot] | 85be66d90d |
|  | 1ac506b391 |
| dependabot[bot] | f7738b77de |
|  | 824037bb7d |
|  | 221292ad14 |
| dependabot[bot] | 16f8c75e9f |
| dependabot[bot] | 90a37079f1 |
|  | 798092af5e |
|  | 2a622a929d |
| dependabot[bot] | ca8eeaa68c |
| dependabot[bot] | d1b8ac1249 |
| dependabot[bot] | 3f629c4d60 |
| dependabot[bot] | 3fa910e68b |
| dependabot[bot] | e3cf2989c9 |
| dependabot[bot] | 136b2f402d |
|  | 8d18d2d9c6 |
|  | f18213361a |
|  | 18d9d32bca |
| dependabot[bot] | 1246e429c9 |
| dependabot[bot] | 77bc46bc37 |
| dependabot[bot] | ce16963c94 |
| dependabot[bot] | a70e8cfe58 |
| dependabot[bot] | ba922a1aaa |
| dependabot[bot] | b09230a884 |
| dependabot[bot] | f1cb9ca08e |
|  | 06513e88c6 |
| dependabot[bot] | b4a79bd068 |
|  | dfd8fe84e0 |
|  | 4857c2e243 |
| dependabot[bot] | 7d384f6160 |
|  | 672a7621f9 |
|  | f0e2fb3f57 |
| dependabot[bot] | 8c3a520512 |
| dependabot[bot] | 22e50d56db |
|  | a0735f3585 |
|  | 50a2e8fde3 |
| dependabot[bot] | 55ed63cc79 |
| dependabot[bot] | 97e9dfff3f |
| dependabot[bot] | 501c9579fb |
| dependabot[bot] | f9aedadee6 |
|  | c3c17b2bc3 |
|  | a894c4589e |
| dependabot[bot] | 56a8a1b5a1 |
| dependabot[bot] | be3f7a6c37 |
|  | 906e400ab7 |
|  | a9265afd4c |
|  | d26058ac80 |
| dependabot[bot] | ebd1f30606 |
| dependabot[bot] | c78e077649 |
| dependabot[bot] | 07619223b0 |
| dependabot[bot] | 25c326ec6c |
| dependabot[bot] | df167b94c2 |
| dependabot[bot] | 3730908881 |
| dependabot[bot] | 975dc1bc11 |
| dependabot[bot] | 31409f0c32 |
| dependabot[bot] | b19273227b |
| dependabot[bot] | f89179fb03 |
|  | 90c971f9f1 |
|  | d685780a4a |
| dependabot[bot] | b6bc8b7b7c |
| dependabot[bot] | 92daba898f |
|  | 138843591e |
|  | 0814552b2a |
|  | 0e0fadd72d |
| dependabot[bot] | 5426bd4392 |
|  | 3520a65099 |
|  | b15a5c2c87 |
|  | a8af04ff82 |
|  | 2148de45a0 |
| dependabot[bot] | c4143dacee |
| dependabot[bot] | a8025e77b3 |
| dependabot[bot] | dd1e76be93 |
| dependabot[bot] | 36f997959a |
| dependabot[bot] | c1faed163a |
|  | 9ca927dbe7 |
| dependabot[bot] | 02c6011818 |
| dependabot[bot] | 2e96b16396 |
| dependabot[bot] | 53b8de6c1c |
| dependabot[bot] | daea9f893c |
| dependabot[bot] | d1b5b1734c |
|  | 74a5899626 |
|  | 202ebf6d4e |
|  | 2c7b417e25 |
|  | bb5e138134 |
| dependabot[bot] | 3a2c3e2f84 |
| dependabot[bot] | d5be0c34ac |
| dependabot[bot] | ea5431ef2b |
| dependabot[bot] | 9c4cdcd11f |
| dependabot[bot] | e5ef6333e4 |
|  | 98779a48b1 |
|  | 9d4848ee77 |
|  | 5126820619 |
|  | 8b5c808e8c |
| dependabot[bot] | 9c75996c40 |
| dependabot[bot] | d524778e42 |
| dependabot[bot] | 52d4bc660e |
| dependabot[bot] | 8884696a6c |
| dependabot[bot] | d493ccde28 |
|  | 1ececaaaa2 |
| dependabot[bot] | 91b48ad432 |
| dependabot[bot] | f3fe40a19f |
| dependabot[bot] | cf4b29c425 |
| dependabot[bot] | 4344e14a9d |
| dependabot[bot] | df935ec423 |
| dependabot[bot] | e7f9f7504e |
| dependabot[bot] | 5721b2353a |
|  | c9de846d0e |
| dependabot[bot] | a598108c26 |
| dependabot[bot] | 5467aa399d |
| dependabot[bot] | da052b074a |
| dependabot[bot] | 90c035edd0 |
| dependabot[bot] | fc4eb44a24 |
|  | a71111b378 |
| dependabot[bot] | 52e0c7e484 |
|  | e32970f191 |
| dependabot[bot] | 897cc36017 |
| dependabot[bot] | d79c575860 |
|  | 1f19f84edd |
| dependabot[bot] | 27c37b8b84 |
|  | 06a5dd3153 |
|  | b5bf270d22 |
| dependabot[bot] | 8e71d69a64 |
| dependabot[bot] | 06edb6f8a8 |
| dependabot[bot] | dca82ec0a1 |
| dependabot[bot] | 9c82ce4103 |
| dependabot[bot] | 8a23a9eb1b |
| dependabot[bot] | e1b7e515df |
| dependabot[bot] | c8ff335ed7 |
| dependabot[bot] | 5736da8ab7 |
| dependabot[bot] | 060bba4dce |
|  | 4c573991d2 |
|  | 7fd6dce55f |
| dependabot[bot] | 1861d756e9 |
| dependabot[bot] | c36c041f5e |
| dependabot[bot] | c3d877bdd2 |
| dependabot[bot] | 1242030d4a |
| dependabot[bot] | 1626e74608 |
| dependabot[bot] | b1b913777f |
|  | 190894010c |
|  | 765265723c |
| dependabot[bot] | 7e20502379 |
| dependabot[bot] | 366fc30e9d |
| dependabot[bot] | aa91788a69 |
|  | 375789b019 |
|  | 140b769a42 |
|  | 88d718271d |
| dependabot[bot] | 6ed26cdd1f |
|  | d1851fa607 |
| dependabot[bot] | e846157c52 |
| dependabot[bot] | e190bb4c1a |
| dependabot[bot] | 137fbe7acd |
| dependabot[bot] | 9ccdb2ae3a |
| dependabot[bot] | f5f7515744 |
|  | ddadbec7e3 |
| dependabot[bot] | d24543e103 |
|  | f80c4c9565 |
|  | 480b383782 |
| dependabot[bot] | d3efd4c24b |
| dependabot[bot] | 67a0acffa2 |
| dependabot[bot] | 41b07da399 |
| dependabot[bot] | a6ce55d5b5 |
|  | 98c01fe1b3 |
| dependabot[bot] | 51df986222 |
|  | 9c625f93a5 |
| dependabot[bot] | 7101d47e2e |
|  | eb85be2770 |
|  | 2da27937a5 |
| dependabot[bot] | 2a29b801a4 |
| dependabot[bot] | 57e65714b0 |
| dependabot[bot] | 0ae40cb51c |
| dependabot[bot] | ddd195dfc6 |
| dependabot[bot] | 54b9f23ec5 |
| dependabot[bot] | 242dd3e626 |
| dependabot[bot] | 1b8acb5b60 |
| dependabot[bot] | a7ab96ab12 |
| dependabot[bot] | 06ab11cf87 |
| dependabot[bot] | 1410a1b06e |
|  | 5baf19f7a3 |
|  | 6c66a7ba17 |
| dependabot[bot] | 37b6e09475 |
|  | e08c8ca26d |
| dependabot[bot] | 2c09e7929f |
|  | 3e760f0d85 |
|  | 3cc6bd19ad |
|  | b7ddfba71d |
| dependabot[bot] | 32f21d208f |
|  | ed7edd9fe0 |
|  | fd3c995c7c |
| dependabot[bot] | c0d1a2d53b |
| dependabot[bot] | 76bc3015a7 |
| dependabot[bot] | ad2896243b |
| dependabot[bot] | d0dcded42d |
| dependabot[bot] | a0dfa01287 |
| dependabot[bot] | 4ec5c90180 |
| dependabot[bot] | a0c813bfc1 |
| dependabot[bot] | 5f7b3a7087 |
| dependabot[bot] | 6426f02a2c |
|  | 7fef92c480 |
|  | c64744dedf |
| dependabot[bot] | 72a2088931 |
| dependabot[bot] | db54556b0f |
| dependabot[bot] | a2653d8462 |
| dependabot[bot] | ef778238f6 |
| dependabot[bot] | 4cc0ddc35d |
|  | a0429179a0 |
| dependabot[bot] | 5cfb45c668 |
| dependabot[bot] | a53b7041f5 |
| dependabot[bot] | f534fae293 |
| dependabot[bot] | f7cbd968d2 |
| dependabot[bot] | 844d76290c |
|  | 8c8122eee0 |
|  | d63f0d5e0b |
|  | 96f4ba5d25 |
| dependabot[bot] | 72e64676da |
|  | 883e54f989 |
| dependabot[bot] | c2d4be3304 |
| dependabot[bot] | de737ddb91 |
|  | 11ec6dd9ac |
| dependabot[bot] | df7541e397 |
|  | 95ac53d780 |
| dependabot[bot] | e8c4b32a65 |
| dependabot[bot] | eca535c978 |
|  | 9088810b49 |
| dependabot[bot] | 172a7053ed |
|  | 3d5bd2adef |
|  | cb03d039f4 |
| dependabot[bot] | bb31b1bc6e |
| dependabot[bot] | 727532858e |
|  | c0868d9dac |
| dependabot[bot] | ce26e1dac6 |
.devcontainer/devcontainer.json

```diff
@@ -10,11 +10,13 @@
   "customizations": {
     "vscode": {
       "extensions": [
-        "ms-python.python",
+        "charliermarsh.ruff",
         "ms-python.pylint",
         "ms-python.vscode-pylance",
         "visualstudioexptteam.vscodeintellicode",
-        "esbenp.prettier-vscode"
+        "redhat.vscode-yaml",
+        "esbenp.prettier-vscode",
+        "GitHub.vscode-pull-request-github"
       ],
       "settings": {
         "terminal.integrated.profiles.linux": {
@@ -28,9 +30,9 @@
         "editor.formatOnType": true,
         "files.trimTrailingWhitespace": true,
         "python.pythonPath": "/usr/local/bin/python3",
-        "python.formatting.provider": "black",
-        "python.formatting.blackArgs": ["--target-version", "py311"],
-        "python.formatting.blackPath": "/usr/local/bin/black"
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
       }
     }
   },
```
							
								
								
									
.github/PULL_REQUEST_TEMPLATE.md (2 changes, vendored)
```diff
@@ -52,7 +52,7 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.
 
 If API endpoints of add-on configuration are added/changed:
```
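For contributors, the new checklist item maps onto a pair of local commands; a minimal sketch, assuming ruff is installed in the working environment (`ruff check --fix` mirrors the `--fix` argument the new pre-commit hook passes, per the .pre-commit-config.yaml diff at the end of this compare):

```sh
# Reformat the paths named in the checklist item.
ruff format supervisor tests

# Lint the same paths, applying safe autofixes as the pre-commit hook does.
ruff check --fix supervisor tests
```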
							
								
								
									
.github/workflows/builder.yml (36 changes, vendored)
```diff
@@ -33,7 +33,7 @@ on:
       - setup.py
 
 env:
-  DEFAULT_PYTHON: "3.11"
+  DEFAULT_PYTHON: "3.12"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
 
@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
         with:
           fetch-depth: 0
 
@@ -70,13 +70,13 @@
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: jitterbit/get-changed-files@v1
+        uses: masesgroup/retrieve-changed-files@v3.0.0
 
       - name: Check if requirements files changed
         id: requirements
         run: |
-          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
-            echo "::set-output name=changed::true"
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
+            echo "changed=true" >> "$GITHUB_OUTPUT"
           fi
 
   build:
@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
         with:
           fetch-depth: 0
 
@@ -106,9 +106,9 @@
 
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2023.10.5
+        uses: home-assistant/wheels@2024.01.0
         with:
-          abi: cp311
+          abi: cp312
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
@@ -125,20 +125,20 @@
 
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
 
       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.2.0
+        uses: sigstore/cosign-installer@v3.5.0
         with:
-          cosign-release: "v2.0.2"
+          cosign-release: "v2.2.3"
 
       - name: Install dirhash and calc hash
         if: needs.init.outputs.publish == 'true'
         run: |
-          pip3 install dirhash
+          pip3 install setuptools dirhash
           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
           echo "${dir_hash}" > rootfs/supervisor.sha256
 
@@ -149,7 +149,7 @@
 
       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.0.0
+        uses: docker/login-action@v3.2.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
 
       - name: Build supervisor
-        uses: home-assistant/builder@2023.09.0
+        uses: home-assistant/builder@2024.03.5
         with:
           args: |
             $BUILD_ARGS \
@@ -178,7 +178,7 @@
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
 
       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
 
       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2023.09.0
+        uses: home-assistant/builder@2024.03.5
         with:
           args: |
             --test \
@@ -324,7 +324,7 @@
           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
             exit 1
           fi
-          echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
+          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
 
       - name: Uninstall SSH add-on
         run: |
```
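Besides the action version bumps, this workflow drops the deprecated `::set-output` command in favor of the `$GITHUB_OUTPUT` file, as the two `echo` changes above show. The underlying mechanics, as a short sketch:

```sh
# Deprecated: outputs were parsed from a magic string on stdout.
echo "::set-output name=changed::true"

# Current: append key=value lines to the file the runner exposes as $GITHUB_OUTPUT;
# later steps read the value as steps.<step-id>.outputs.changed.
echo "changed=true" >> "$GITHUB_OUTPUT"
```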
							
								
								
									
.github/workflows/ci.yaml (250 changes, vendored)
```diff
@@ -8,8 +8,8 @@ on:
   pull_request: ~
 
 env:
-  DEFAULT_PYTHON: "3.11"
-  PRE_COMMIT_HOME: ~/.cache/pre-commit
+  DEFAULT_PYTHON: "3.12"
+  PRE_COMMIT_CACHE: ~/.cache/pre-commit
 
 concurrency:
   group: "${{ github.workflow }}-${{ github.ref }}"
@@ -25,15 +25,15 @@
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -47,9 +47,10 @@
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          lookup-only: true
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
           restore-keys: |
@@ -60,21 +61,21 @@
           . venv/bin/activate
           pre-commit install-hooks
 
-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -84,10 +85,67 @@
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Run black
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.0.2
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff-format
         run: |
           . venv/bin/activate
-          black --target-version py311 --check supervisor tests setup.py
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
+
+  lint-ruff:
+    name: Check ruff
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.1.6
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.1.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.0.2
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.0.2
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
 
   lint-dockerfile:
     name: Check Dockerfile
@@ -95,7 +153,7 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -110,15 +168,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -130,9 +188,9 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -148,94 +206,21 @@
           . venv/bin/activate
           pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
 
-  lint-flake8:
-    name: Check flake8
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.2
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register flake8 problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/flake8.json"
-      - name: Run flake8
-        run: |
-          . venv/bin/activate
-          flake8 supervisor tests
-
-  lint-isort:
-    name: Check isort
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.2
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.2
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run isort
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
-
   lint-json:
     name: Check JSON
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -247,9 +232,9 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -271,15 +256,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -297,66 +282,25 @@
           . venv/bin/activate
           pylint supervisor tests
 
-  lint-pyupgrade:
-    name: Check pyupgrade
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.2
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.2
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run pyupgrade
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
-
   pytest:
     runs-on: ubuntu-latest
     needs: prepare
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.2.0
+        uses: sigstore/cosign-installer@v3.5.0
         with:
-          cosign-release: "v2.0.2"
+          cosign-release: "v2.2.3"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -391,7 +335,7 @@
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v3.1.3
+        uses: actions/upload-artifact@v4.3.3
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
@@ -402,15 +346,15 @@
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.1
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.2
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -421,7 +365,7 @@
           echo "Failed to restore Python virtual environment from cache"
           exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.7
       - name: Combine coverage results
         run: |
           . venv/bin/activate
@@ -429,4 +373,4 @@
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v3.1.4
+        uses: codecov/codecov-action@v4.4.1
```
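The new ruff jobs set `RUFF_OUTPUT_FORMAT: github`, which switches ruff's diagnostics to GitHub workflow-command annotations so violations surface inline on the pull request without a JSON problem matcher. The same output can be reproduced locally; a sketch, assuming ruff is on the PATH:

```sh
# Prints ::error file=...,line=...,col=... annotations instead of the default text.
RUFF_OUTPUT_FORMAT=github ruff check supervisor tests
```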
							
								
								
									
.github/workflows/lock.yml (2 changes, vendored)
```diff
@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v5.0.0
+      - uses: dessant/lock-threads@v5.0.1
         with:
           github-token: ${{ github.token }}
           issue-inactive-days: "30"
```
							
								
								
									
.github/workflows/matchers/flake8.json (30 changes, vendored, deleted)
```diff
@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "flake8-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "flake8-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
```
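With flake8 gone and ruff annotating through `RUFF_OUTPUT_FORMAT`, this stdout-scraping matcher has no consumer left. For reference, matchers are attached and detached with workflow commands; a sketch using the owner names this deleted file declared:

```sh
# Attach a matcher so subsequent stdout is scanned against its regexps
# (ci.yaml still does this for hadolint).
echo "::add-matcher::.github/workflows/matchers/flake8.json"

# Detach by the owner declared inside the matcher JSON.
echo "::remove-matcher owner=flake8-error::"
echo "::remove-matcher owner=flake8-warning::"
```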
							
								
								
									
.github/workflows/release-drafter.yml (6 changes, vendored)
```diff
@@ -11,7 +11,7 @@
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.6
         with:
           fetch-depth: 0
 
@@ -33,10 +33,10 @@
 
           echo Current version:    $latest
           echo New target version: $datepre.$newpost
-          echo "::set-output name=version::$datepre.$newpost"
+          echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
 
       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v5.25.0
+        uses: release-drafter/release-drafter@v6.0.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}
```
							
								
								
									
.github/workflows/sentry.yaml (4 changes, vendored)
							| @@ -10,9 +10,9 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.1.1 |         uses: actions/checkout@v4.1.6 | ||||||
|       - name: Sentry Release |       - name: Sentry Release | ||||||
|         uses: getsentry/action-release@v1.4.1 |         uses: getsentry/action-release@v1.7.0 | ||||||
|         env: |         env: | ||||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} |           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||||
|           SENTRY_ORG: ${{ secrets.SENTRY_ORG }} |           SENTRY_ORG: ${{ secrets.SENTRY_ORG }} | ||||||
|   | |||||||
.github/workflows/stale.yml (vendored, 2 changes)
| @@ -9,7 +9,7 @@ jobs: |
|   stale: |   stale: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/stale@v8.0.0 |       - uses: actions/stale@v9.0.0 | ||||||
|         with: |         with: | ||||||
|           repo-token: ${{ secrets.GITHUB_TOKEN }} |           repo-token: ${{ secrets.GITHUB_TOKEN }} | ||||||
|           days-before-stale: 30 |           days-before-stale: 30 | ||||||
|   | |||||||
| @@ -1,34 +1,15 @@ | |||||||
| repos: | repos: | ||||||
|   - repo: https://github.com/psf/black |   - repo: https://github.com/astral-sh/ruff-pre-commit | ||||||
|     rev: 23.1.0 |     rev: v0.2.1 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: black |       - id: ruff | ||||||
|         args: |         args: | ||||||
|           - --safe |           - --fix | ||||||
|           - --quiet |       - id: ruff-format | ||||||
|           - --target-version |  | ||||||
|           - py311 |  | ||||||
|         files: ^((supervisor|tests)/.+)?[^/]+\.py$ |         files: ^((supervisor|tests)/.+)?[^/]+\.py$ | ||||||
|   - repo: https://github.com/PyCQA/flake8 |  | ||||||
|     rev: 6.0.0 |  | ||||||
|     hooks: |  | ||||||
|       - id: flake8 |  | ||||||
|         additional_dependencies: |  | ||||||
|           - flake8-docstrings==1.7.0 |  | ||||||
|           - pydocstyle==6.3.0 |  | ||||||
|         files: ^(supervisor|script|tests)/.+\.py$ |  | ||||||
|   - repo: https://github.com/pre-commit/pre-commit-hooks |   - repo: https://github.com/pre-commit/pre-commit-hooks | ||||||
|     rev: v4.3.0 |     rev: v4.5.0 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: check-executables-have-shebangs |       - id: check-executables-have-shebangs | ||||||
|         stages: [manual] |         stages: [manual] | ||||||
|       - id: check-json |       - id: check-json | ||||||
|   - repo: https://github.com/PyCQA/isort |  | ||||||
|     rev: 5.12.0 |  | ||||||
|     hooks: |  | ||||||
|       - id: isort |  | ||||||
|   - repo: https://github.com/asottile/pyupgrade |  | ||||||
|     rev: v3.15.0 |  | ||||||
|     hooks: |  | ||||||
|       - id: pyupgrade |  | ||||||
|         args: [--py311-plus] |  | ||||||
|   | |||||||
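This hook migration collapses four tools (black, flake8 with its docstring plugins, isort, pyupgrade) into ruff: the "ruff" hook lints with --fix to apply autofixes, and "ruff-format" replaces black as the formatter. Outside pre-commit the same two steps can be driven directly; a minimal sketch, assuming ruff is on PATH (it is pinned in the test requirements below):

    import subprocess
    import sys

    def run_ruff(paths: list[str]) -> int:
        """Lint with autofix, then format, mirroring the two hooks."""
        check = subprocess.run(["ruff", "check", "--fix", *paths])
        fmt = subprocess.run(["ruff", "format", *paths])
        # Non-zero if either step failed.
        return check.returncode or fmt.returncode

    if __name__ == "__main__":
        sys.exit(run_ruff(["supervisor", "tests"]))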
.vscode/tasks.json (vendored, 18 changes)
| @@ -58,9 +58,23 @@ |
|       "problemMatcher": [] |       "problemMatcher": [] | ||||||
|     }, |     }, | ||||||
|     { |     { | ||||||
|       "label": "Flake8", |       "label": "Ruff Check", | ||||||
|       "type": "shell", |       "type": "shell", | ||||||
|       "command": "flake8 supervisor tests", |       "command": "ruff check --fix supervisor tests", | ||||||
|  |       "group": { | ||||||
|  |         "kind": "test", | ||||||
|  |         "isDefault": true | ||||||
|  |       }, | ||||||
|  |       "presentation": { | ||||||
|  |         "reveal": "always", | ||||||
|  |         "panel": "new" | ||||||
|  |       }, | ||||||
|  |       "problemMatcher": [] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "label": "Ruff Format", | ||||||
|  |       "type": "shell", | ||||||
|  |       "command": "ruff format supervisor tests", | ||||||
|       "group": { |       "group": { | ||||||
|         "kind": "test", |         "kind": "test", | ||||||
|         "isDefault": true |         "isDefault": true | ||||||
|   | |||||||
| @@ -15,7 +15,7 @@ WORKDIR /usr/src | |||||||
| RUN \ | RUN \ | ||||||
|     set -x \ |     set -x \ | ||||||
|     && apk add --no-cache \ |     && apk add --no-cache \ | ||||||
|         coreutils \ |         findutils \ | ||||||
|         eudev \ |         eudev \ | ||||||
|         eudev-libs \ |         eudev-libs \ | ||||||
|         git \ |         git \ | ||||||
|   | |||||||
build.yaml (12 changes)
| @@ -1,10 +1,10 @@ |
| image: ghcr.io/home-assistant/{arch}-hassio-supervisor | image: ghcr.io/home-assistant/{arch}-hassio-supervisor | ||||||
| build_from: | build_from: | ||||||
|   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.18 |   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.19 | ||||||
|   armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.18 |   armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.19 | ||||||
|   armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.18 |   armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.19 | ||||||
|   amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.18 |   amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.19 | ||||||
|   i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.18 |   i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.19 | ||||||
| codenotary: | codenotary: | ||||||
|   signer: notary@home-assistant.io |   signer: notary@home-assistant.io | ||||||
|   base_image: notary@home-assistant.io |   base_image: notary@home-assistant.io | ||||||
| @@ -12,7 +12,7 @@ cosign: | |||||||
|   base_identity: https://github.com/home-assistant/docker-base/.* |   base_identity: https://github.com/home-assistant/docker-base/.* | ||||||
|   identity: https://github.com/home-assistant/supervisor/.* |   identity: https://github.com/home-assistant/supervisor/.* | ||||||
| args: | args: | ||||||
|   COSIGN_VERSION: 2.0.2 |   COSIGN_VERSION: 2.2.3 | ||||||
| labels: | labels: | ||||||
|   io.hass.type: supervisor |   io.hass.type: supervisor | ||||||
|   org.opencontainers.image.title: Home Assistant Supervisor |   org.opencontainers.image.title: Home Assistant Supervisor | ||||||
|   | |||||||
pylintrc (45 deletions)
| @@ -1,45 +0,0 @@ |
| [MASTER] |  | ||||||
| reports=no |  | ||||||
| jobs=2 |  | ||||||
|  |  | ||||||
| good-names=id,i,j,k,ex,Run,_,fp,T,os |  | ||||||
|  |  | ||||||
| extension-pkg-whitelist= |  | ||||||
|   ciso8601 |  | ||||||
|  |  | ||||||
| # Reasons disabled: |  | ||||||
| # format - handled by black |  | ||||||
| # locally-disabled - it spams too much |  | ||||||
| # duplicate-code - unavoidable |  | ||||||
| # cyclic-import - doesn't test if both import on load |  | ||||||
| # abstract-class-not-used - is flaky, should not show up but does |  | ||||||
| # unused-argument - generic callbacks and setup methods create a lot of warnings |  | ||||||
| # too-many-* - are not enforced for the sake of readability |  | ||||||
| # too-few-* - same as too-many-* |  | ||||||
| # abstract-method - with intro of async there are always methods missing |  | ||||||
| disable= |  | ||||||
|   format, |  | ||||||
|   abstract-method, |  | ||||||
|   cyclic-import, |  | ||||||
|   duplicate-code, |  | ||||||
|   locally-disabled, |  | ||||||
|   no-else-return, |  | ||||||
|   not-context-manager, |  | ||||||
|   too-few-public-methods, |  | ||||||
|   too-many-arguments, |  | ||||||
|   too-many-branches, |  | ||||||
|   too-many-instance-attributes, |  | ||||||
|   too-many-lines, |  | ||||||
|   too-many-locals, |  | ||||||
|   too-many-public-methods, |  | ||||||
|   too-many-return-statements, |  | ||||||
|   too-many-statements, |  | ||||||
|   unused-argument, |  | ||||||
|   consider-using-with |  | ||||||
|  |  | ||||||
| [EXCEPTIONS] |  | ||||||
| overgeneral-exceptions=builtins.Exception |  | ||||||
|  |  | ||||||
|  |  | ||||||
| [TYPECHECK] |  | ||||||
| ignored-modules = distutils |  | ||||||
pyproject.toml (new file, 371 additions)
| @@ -0,0 +1,371 @@ |
|  | [build-system] | ||||||
|  | requires = ["setuptools~=68.0.0", "wheel~=0.40.0"] | ||||||
|  | build-backend = "setuptools.build_meta" | ||||||
|  |  | ||||||
|  | [project] | ||||||
|  | name = "Supervisor" | ||||||
|  | dynamic = ["version", "dependencies"] | ||||||
|  | license = { text = "Apache-2.0" } | ||||||
|  | description = "Open-source private cloud os for Home-Assistant based on HassOS" | ||||||
|  | readme = "README.md" | ||||||
|  | authors = [ | ||||||
|  |     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" }, | ||||||
|  | ] | ||||||
|  | keywords = ["docker", "home-assistant", "api"] | ||||||
|  | requires-python = ">=3.12.0" | ||||||
|  |  | ||||||
|  | [project.urls] | ||||||
|  | "Homepage" = "https://www.home-assistant.io/" | ||||||
|  | "Source Code" = "https://github.com/home-assistant/supervisor" | ||||||
|  | "Bug Reports" = "https://github.com/home-assistant/supervisor/issues" | ||||||
|  | "Docs: Dev" = "https://developers.home-assistant.io/" | ||||||
|  | "Discord" = "https://www.home-assistant.io/join-chat/" | ||||||
|  | "Forum" = "https://community.home-assistant.io/" | ||||||
|  |  | ||||||
|  | [tool.setuptools] | ||||||
|  | platforms = ["any"] | ||||||
|  | zip-safe = false | ||||||
|  | include-package-data = true | ||||||
|  |  | ||||||
|  | [tool.setuptools.packages.find] | ||||||
|  | include = ["supervisor*"] | ||||||
|  |  | ||||||
|  | [tool.pylint.MAIN] | ||||||
|  | py-version = "3.11" | ||||||
|  | # Use a conservative default here; 2 should speed up most setups and not hurt | ||||||
|  | # any too bad. Override on command line as appropriate. | ||||||
|  | jobs = 2 | ||||||
|  | persistent = false | ||||||
|  | extension-pkg-allow-list = ["ciso8601"] | ||||||
|  |  | ||||||
|  | [tool.pylint.BASIC] | ||||||
|  | class-const-naming-style = "any" | ||||||
|  | good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"] | ||||||
|  |  | ||||||
|  | [tool.pylint."MESSAGES CONTROL"] | ||||||
|  | # Reasons disabled: | ||||||
|  | # format - handled by ruff | ||||||
|  | # abstract-method - with intro of async there are always methods missing | ||||||
|  | # cyclic-import - doesn't test if both import on load | ||||||
|  | # duplicate-code - unavoidable | ||||||
|  | # locally-disabled - it spams too much | ||||||
|  | # too-many-* - are not enforced for the sake of readability | ||||||
|  | # too-few-* - same as too-many-* | ||||||
|  | # unused-argument - generic callbacks and setup methods create a lot of warnings | ||||||
|  | disable = [ | ||||||
|  |     "format", | ||||||
|  |     "abstract-method", | ||||||
|  |     "cyclic-import", | ||||||
|  |     "duplicate-code", | ||||||
|  |     "locally-disabled", | ||||||
|  |     "no-else-return", | ||||||
|  |     "not-context-manager", | ||||||
|  |     "too-few-public-methods", | ||||||
|  |     "too-many-arguments", | ||||||
|  |     "too-many-branches", | ||||||
|  |     "too-many-instance-attributes", | ||||||
|  |     "too-many-lines", | ||||||
|  |     "too-many-locals", | ||||||
|  |     "too-many-public-methods", | ||||||
|  |     "too-many-return-statements", | ||||||
|  |     "too-many-statements", | ||||||
|  |     "unused-argument", | ||||||
|  |     "consider-using-with", | ||||||
|  |  | ||||||
|  |     # Handled by ruff | ||||||
|  |     # Ref: <https://github.com/astral-sh/ruff/issues/970> | ||||||
|  |     "await-outside-async",    # PLE1142 | ||||||
|  |     "bad-str-strip-call",     # PLE1310 | ||||||
|  |     "bad-string-format-type", # PLE1307 | ||||||
|  |     "bidirectional-unicode",  # PLE2502 | ||||||
|  |     "continue-in-finally",    # PLE0116 | ||||||
|  |     "duplicate-bases",        # PLE0241 | ||||||
|  |     "format-needs-mapping",   # F502 | ||||||
|  |     "function-redefined",     # F811 | ||||||
|  |     # Needed because ruff does not understand type of __all__ generated by a function | ||||||
|  |     # "invalid-all-format", # PLE0605 | ||||||
|  |     "invalid-all-object",                 # PLE0604 | ||||||
|  |     "invalid-character-backspace",        # PLE2510 | ||||||
|  |     "invalid-character-esc",              # PLE2513 | ||||||
|  |     "invalid-character-nul",              # PLE2514 | ||||||
|  |     "invalid-character-sub",              # PLE2512 | ||||||
|  |     "invalid-character-zero-width-space", # PLE2515 | ||||||
|  |     "logging-too-few-args",               # PLE1206 | ||||||
|  |     "logging-too-many-args",              # PLE1205 | ||||||
|  |     "missing-format-string-key",          # F524 | ||||||
|  |     "mixed-format-string",                # F506 | ||||||
|  |     "no-method-argument",                 # N805 | ||||||
|  |     "no-self-argument",                   # N805 | ||||||
|  |     "nonexistent-operator",               # B002 | ||||||
|  |     "nonlocal-without-binding",           # PLE0117 | ||||||
|  |     "not-in-loop",                        # F701, F702 | ||||||
|  |     "notimplemented-raised",              # F901 | ||||||
|  |     "return-in-init",                     # PLE0101 | ||||||
|  |     "return-outside-function",            # F706 | ||||||
|  |     "syntax-error",                       # E999 | ||||||
|  |     "too-few-format-args",                # F524 | ||||||
|  |     "too-many-format-args",               # F522 | ||||||
|  |     "too-many-star-expressions",          # F622 | ||||||
|  |     "truncated-format-string",            # F501 | ||||||
|  |     "undefined-all-variable",             # F822 | ||||||
|  |     "undefined-variable",                 # F821 | ||||||
|  |     "used-prior-global-declaration",      # PLE0118 | ||||||
|  |     "yield-inside-async-function",        # PLE1700 | ||||||
|  |     "yield-outside-function",             # F704 | ||||||
|  |     "anomalous-backslash-in-string",      # W605 | ||||||
|  |     "assert-on-string-literal",           # PLW0129 | ||||||
|  |     "assert-on-tuple",                    # F631 | ||||||
|  |     "bad-format-string",                  # W1302, F | ||||||
|  |     "bad-format-string-key",              # W1300, F | ||||||
|  |     "bare-except",                        # E722 | ||||||
|  |     "binary-op-exception",                # PLW0711 | ||||||
|  |     "cell-var-from-loop",                 # B023 | ||||||
|  |     # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work | ||||||
|  |     "duplicate-except",                     # B014 | ||||||
|  |     "duplicate-key",                        # F601 | ||||||
|  |     "duplicate-string-formatting-argument", # F | ||||||
|  |     "duplicate-value",                      # F | ||||||
|  |     "eval-used",                            # PGH001 | ||||||
|  |     "exec-used",                            # S102 | ||||||
|  |     # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work | ||||||
|  |     "f-string-without-interpolation",      # F541 | ||||||
|  |     "forgotten-debug-statement",           # T100 | ||||||
|  |     "format-string-without-interpolation", # F | ||||||
|  |     # "global-statement", # PLW0603, ruff catches new occurrences, needs more work | ||||||
|  |     "global-variable-not-assigned",  # PLW0602 | ||||||
|  |     "implicit-str-concat",           # ISC001 | ||||||
|  |     "import-self",                   # PLW0406 | ||||||
|  |     "inconsistent-quotes",           # Q000 | ||||||
|  |     "invalid-envvar-default",        # PLW1508 | ||||||
|  |     "keyword-arg-before-vararg",     # B026 | ||||||
|  |     "logging-format-interpolation",  # G | ||||||
|  |     "logging-fstring-interpolation", # G | ||||||
|  |     "logging-not-lazy",              # G | ||||||
|  |     "misplaced-future",              # F404 | ||||||
|  |     "named-expr-without-context",    # PLW0131 | ||||||
|  |     "nested-min-max",                # PLW3301 | ||||||
|  |     # "pointless-statement", # B018, ruff catches new occurrences, needs more work | ||||||
|  |     "raise-missing-from", # TRY200 | ||||||
|  |     # "redefined-builtin", # A001, ruff is way more stricter, needs work | ||||||
|  |     "try-except-raise",               # TRY302 | ||||||
|  |     "unused-argument",                # ARG001, we don't use it | ||||||
|  |     "unused-format-string-argument",  #F507 | ||||||
|  |     "unused-format-string-key",       # F504 | ||||||
|  |     "unused-import",                  # F401 | ||||||
|  |     "unused-variable",                # F841 | ||||||
|  |     "useless-else-on-loop",           # PLW0120 | ||||||
|  |     "wildcard-import",                # F403 | ||||||
|  |     "bad-classmethod-argument",       # N804 | ||||||
|  |     "consider-iterating-dictionary",  # SIM118 | ||||||
|  |     "empty-docstring",                # D419 | ||||||
|  |     "invalid-name",                   # N815 | ||||||
|  |     "line-too-long",                  # E501, disabled globally | ||||||
|  |     "missing-class-docstring",        # D101 | ||||||
|  |     "missing-final-newline",          # W292 | ||||||
|  |     "missing-function-docstring",     # D103 | ||||||
|  |     "missing-module-docstring",       # D100 | ||||||
|  |     "multiple-imports",               #E401 | ||||||
|  |     "singleton-comparison",           # E711, E712 | ||||||
|  |     "subprocess-run-check",           # PLW1510 | ||||||
|  |     "superfluous-parens",             # UP034 | ||||||
|  |     "ungrouped-imports",              # I001 | ||||||
|  |     "unidiomatic-typecheck",          # E721 | ||||||
|  |     "unnecessary-direct-lambda-call", # PLC3002 | ||||||
|  |     "unnecessary-lambda-assignment",  # PLC3001 | ||||||
|  |     "unneeded-not",                   # SIM208 | ||||||
|  |     "useless-import-alias",           # PLC0414 | ||||||
|  |     "wrong-import-order",             # I001 | ||||||
|  |     "wrong-import-position",          # E402 | ||||||
|  |     "comparison-of-constants",        # PLR0133 | ||||||
|  |     "comparison-with-itself",         # PLR0124 | ||||||
|  |     # "consider-alternative-union-syntax", # UP007, typing extension | ||||||
|  |     "consider-merging-isinstance", # PLR1701 | ||||||
|  |     # "consider-using-alias",              # UP006, typing extension | ||||||
|  |     "consider-using-dict-comprehension", # C402 | ||||||
|  |     "consider-using-generator",          # C417 | ||||||
|  |     "consider-using-get",                # SIM401 | ||||||
|  |     "consider-using-set-comprehension",  # C401 | ||||||
|  |     "consider-using-sys-exit",           # PLR1722 | ||||||
|  |     "consider-using-ternary",            # SIM108 | ||||||
|  |     "literal-comparison",                # F632 | ||||||
|  |     "property-with-parameters",          # PLR0206 | ||||||
|  |     "super-with-arguments",              # UP008 | ||||||
|  |     "too-many-branches",                 # PLR0912 | ||||||
|  |     "too-many-return-statements",        # PLR0911 | ||||||
|  |     "too-many-statements",               # PLR0915 | ||||||
|  |     "trailing-comma-tuple",              # COM818 | ||||||
|  |     "unnecessary-comprehension",         # C416 | ||||||
|  |     "use-a-generator",                   # C417 | ||||||
|  |     "use-dict-literal",                  # C406 | ||||||
|  |     "use-list-literal",                  # C405 | ||||||
|  |     "useless-object-inheritance",        # UP004 | ||||||
|  |     "useless-return",                    # PLR1711 | ||||||
|  |     # "no-self-use", # PLR6301  # Optional plugin, not enabled | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | [tool.pylint.REPORTS] | ||||||
|  | score = false | ||||||
|  |  | ||||||
|  | [tool.pylint.TYPECHECK] | ||||||
|  | ignored-modules = ["distutils"] | ||||||
|  |  | ||||||
|  | [tool.pylint.FORMAT] | ||||||
|  | expected-line-ending-format = "LF" | ||||||
|  |  | ||||||
|  | [tool.pylint.EXCEPTIONS] | ||||||
|  | overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] | ||||||
|  |  | ||||||
|  | [tool.pytest.ini_options] | ||||||
|  | testpaths = ["tests"] | ||||||
|  | norecursedirs = [".git"] | ||||||
|  | log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" | ||||||
|  | log_date_format = "%Y-%m-%d %H:%M:%S" | ||||||
|  | asyncio_mode = "auto" | ||||||
|  | filterwarnings = [ | ||||||
|  |     "error", | ||||||
|  |     "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash", | ||||||
|  |     "ignore::pytest.PytestUnraisableExceptionWarning", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | [tool.ruff] | ||||||
|  | select = [ | ||||||
|  |     "B002",    # Python does not support the unary prefix increment | ||||||
|  |     "B007",    # Loop control variable {name} not used within loop body | ||||||
|  |     "B014",    # Exception handler with duplicate exception | ||||||
|  |     "B023",    # Function definition does not bind loop variable {name} | ||||||
|  |     "B026",    # Star-arg unpacking after a keyword argument is strongly discouraged | ||||||
|  |     "C",       # complexity | ||||||
|  |     "COM818",  # Trailing comma on bare tuple prohibited | ||||||
|  |     "D",       # docstrings | ||||||
|  |     "DTZ003",  # Use datetime.now(tz=) instead of datetime.utcnow() | ||||||
|  |     "DTZ004",  # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) | ||||||
|  |     "E",       # pycodestyle | ||||||
|  |     "F",       # pyflakes/autoflake | ||||||
|  |     "G",       # flake8-logging-format | ||||||
|  |     "I",       # isort | ||||||
|  |     "ICN001",  # import concentions; {name} should be imported as {asname} | ||||||
|  |     "N804",    # First argument of a class method should be named cls | ||||||
|  |     "N805",    # First argument of a method should be named self | ||||||
|  |     "N815",    # Variable {name} in class scope should not be mixedCase | ||||||
|  |     "PGH001",  # No builtin eval() allowed | ||||||
|  |     "PGH004",  # Use specific rule codes when using noqa | ||||||
|  |     "PLC0414", # Useless import alias. Import alias does not rename original package. | ||||||
|  |     "PLC",     # pylint | ||||||
|  |     "PLE",     # pylint | ||||||
|  |     "PLR",     # pylint | ||||||
|  |     "PLW",     # pylint | ||||||
|  |     "Q000",    # Double quotes found but single quotes preferred | ||||||
|  |     "RUF006",  # Store a reference to the return value of asyncio.create_task | ||||||
|  |     "S102",    # Use of exec detected | ||||||
|  |     "S103",    # bad-file-permissions | ||||||
|  |     "S108",    # hardcoded-temp-file | ||||||
|  |     "S306",    # suspicious-mktemp-usage | ||||||
|  |     "S307",    # suspicious-eval-usage | ||||||
|  |     "S313",    # suspicious-xmlc-element-tree-usage | ||||||
|  |     "S314",    # suspicious-xml-element-tree-usage | ||||||
|  |     "S315",    # suspicious-xml-expat-reader-usage | ||||||
|  |     "S316",    # suspicious-xml-expat-builder-usage | ||||||
|  |     "S317",    # suspicious-xml-sax-usage | ||||||
|  |     "S318",    # suspicious-xml-mini-dom-usage | ||||||
|  |     "S319",    # suspicious-xml-pull-dom-usage | ||||||
|  |     "S320",    # suspicious-xmle-tree-usage | ||||||
|  |     "S601",    # paramiko-call | ||||||
|  |     "S602",    # subprocess-popen-with-shell-equals-true | ||||||
|  |     "S604",    # call-with-shell-equals-true | ||||||
|  |     "S608",    # hardcoded-sql-expression | ||||||
|  |     "S609",    # unix-command-wildcard-injection | ||||||
|  |     "SIM105",  # Use contextlib.suppress({exception}) instead of try-except-pass | ||||||
|  |     "SIM117",  # Merge with-statements that use the same scope | ||||||
|  |     "SIM118",  # Use {key} in {dict} instead of {key} in {dict}.keys() | ||||||
|  |     "SIM201",  # Use {left} != {right} instead of not {left} == {right} | ||||||
|  |     "SIM208",  # Use {expr} instead of not (not {expr}) | ||||||
|  |     "SIM212",  # Use {a} if {a} else {b} instead of {b} if not {a} else {a} | ||||||
|  |     "SIM300",  # Yoda conditions. Use 'age == 42' instead of '42 == age'. | ||||||
|  |     "SIM401",  # Use get from dict with default instead of an if block | ||||||
|  |     "T100",    # Trace found: {name} used | ||||||
|  |     "T20",     # flake8-print | ||||||
|  |     "TID251",  # Banned imports | ||||||
|  |     "TRY004",  # Prefer TypeError exception for invalid type | ||||||
|  |     "TRY200",  # Use raise from to specify exception cause | ||||||
|  |     "TRY302",  # Remove exception handler; error is immediately re-raised | ||||||
|  |     "UP",      # pyupgrade | ||||||
|  |     "W",       # pycodestyle | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | ignore = [ | ||||||
|  |     "D202", # No blank lines allowed after function docstring | ||||||
|  |     "D203", # 1 blank line required before class docstring | ||||||
|  |     "D213", # Multi-line docstring summary should start at the second line | ||||||
|  |     "D406", # Section name should end with a newline | ||||||
|  |     "D407", # Section name underlining | ||||||
|  |     "E501", # line too long | ||||||
|  |     "E731", # do not assign a lambda expression, use a def | ||||||
|  |  | ||||||
|  |     # Ignore ignored, as the rule is now back in preview/nursery, which cannot | ||||||
|  |     # be ignored anymore without warnings. | ||||||
|  |     # https://github.com/astral-sh/ruff/issues/7491 | ||||||
|  |     # "PLC1901", # Lots of false positives | ||||||
|  |  | ||||||
|  |     # False positives https://github.com/astral-sh/ruff/issues/5386 | ||||||
|  |     "PLC0208", # Use a sequence type instead of a `set` when iterating over values | ||||||
|  |     "PLR0911", # Too many return statements ({returns} > {max_returns}) | ||||||
|  |     "PLR0912", # Too many branches ({branches} > {max_branches}) | ||||||
|  |     "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) | ||||||
|  |     "PLR0915", # Too many statements ({statements} > {max_statements}) | ||||||
|  |     "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable | ||||||
|  |     "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target | ||||||
|  |     "UP006",   # keep type annotation style as is | ||||||
|  |     "UP007",   # keep type annotation style as is | ||||||
|  |     # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 | ||||||
|  |     "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` | ||||||
|  |  | ||||||
|  |     # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules | ||||||
|  |     "W191", | ||||||
|  |     "E111", | ||||||
|  |     "E114", | ||||||
|  |     "E117", | ||||||
|  |     "D206", | ||||||
|  |     "D300", | ||||||
|  |     "Q000", | ||||||
|  |     "Q001", | ||||||
|  |     "Q002", | ||||||
|  |     "Q003", | ||||||
|  |     "COM812", | ||||||
|  |     "COM819", | ||||||
|  |     "ISC001", | ||||||
|  |     "ISC002", | ||||||
|  |  | ||||||
|  |     # Disabled because ruff does not understand type of __all__ generated by a function | ||||||
|  |     "PLE0605", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | [tool.ruff.flake8-import-conventions.extend-aliases] | ||||||
|  | voluptuous = "vol" | ||||||
|  |  | ||||||
|  | [tool.ruff.flake8-pytest-style] | ||||||
|  | fixture-parentheses = false | ||||||
|  |  | ||||||
|  | [tool.ruff.flake8-tidy-imports.banned-api] | ||||||
|  | "pytz".msg = "use zoneinfo instead" | ||||||
|  |  | ||||||
|  | [tool.ruff.isort] | ||||||
|  | force-sort-within-sections = true | ||||||
|  | section-order = [ | ||||||
|  |     "future", | ||||||
|  |     "standard-library", | ||||||
|  |     "third-party", | ||||||
|  |     "first-party", | ||||||
|  |     "local-folder", | ||||||
|  | ] | ||||||
|  | forced-separate = ["tests"] | ||||||
|  | known-first-party = ["supervisor", "tests"] | ||||||
|  | combine-as-imports = true | ||||||
|  | split-on-trailing-comma = false | ||||||
|  |  | ||||||
|  | [tool.ruff.per-file-ignores] | ||||||
|  |  | ||||||
|  | # DBus Service Mocks must use typing and names understood by dbus-fast | ||||||
|  | "tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"] | ||||||
|  |  | ||||||
|  | [tool.ruff.mccabe] | ||||||
|  | max-complexity = 25 | ||||||
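Among the new lint settings, the flake8-tidy-imports banned-api entry makes TID251 reject any import of pytz with the message "use zoneinfo instead". The stdlib replacement looks like this (the timezone and timestamp are illustrative):

    from datetime import datetime
    from zoneinfo import ZoneInfo  # stdlib since Python 3.9

    # Instead of pytz.timezone("Europe/Amsterdam").localize(naive_dt):
    aware = datetime(2024, 6, 1, 12, 0, tzinfo=ZoneInfo("Europe/Amsterdam"))
    print(aware.isoformat())  # 2024-06-01T12:00:00+02:00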
| @@ -1,6 +0,0 @@ | |||||||
| [pytest] |  | ||||||
| asyncio_mode = auto |  | ||||||
| filterwarnings = |  | ||||||
|     error |  | ||||||
|     ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash |  | ||||||
|     ignore::pytest.PytestUnraisableExceptionWarning |  | ||||||
| @@ -1,27 +1,30 @@ | |||||||
| aiodns==3.1.1 | aiodns==3.2.0 | ||||||
| aiohttp==3.9.0 | aiohttp==3.9.5 | ||||||
| aiohttp-fast-url-dispatcher==0.1.1 | aiohttp-fast-url-dispatcher==0.3.0 | ||||||
| async_timeout==4.0.3 |  | ||||||
| atomicwrites-homeassistant==1.4.1 | atomicwrites-homeassistant==1.4.1 | ||||||
| attrs==23.1.0 | attrs==23.2.0 | ||||||
| awesomeversion==23.11.0 | awesomeversion==24.2.0 | ||||||
| brotli==1.1.0 | brotli==1.1.0 | ||||||
| ciso8601==2.3.1 | ciso8601==2.3.1 | ||||||
| colorlog==6.7.0 | colorlog==6.8.2 | ||||||
| cpe==1.2.1 | cpe==1.2.1 | ||||||
| cryptography==41.0.5 | cryptography==42.0.8 | ||||||
| debugpy==1.8.0 | debugpy==1.8.1 | ||||||
| deepmerge==1.1.0 | deepmerge==1.1.1 | ||||||
| dirhash==0.2.1 | dirhash==0.4.0 | ||||||
| docker==6.1.3 | docker==7.1.0 | ||||||
| faust-cchardet==2.1.19 | faust-cchardet==2.1.19 | ||||||
| gitpython==3.1.40 | gitpython==3.1.43 | ||||||
| jinja2==3.1.2 | jinja2==3.1.4 | ||||||
| pulsectl==23.5.2 | orjson==3.9.15 | ||||||
| pyudev==0.24.1 | pulsectl==24.4.0 | ||||||
|  | pyudev==0.24.3 | ||||||
| PyYAML==6.0.1 | PyYAML==6.0.1 | ||||||
| securetar==2023.3.0 | requests==2.32.3 | ||||||
| sentry-sdk==1.35.0 | securetar==2024.2.1 | ||||||
| voluptuous==0.14.1 | sentry-sdk==2.5.1 | ||||||
| dbus-fast==2.14.0 | setuptools==70.0.0 | ||||||
| typing_extensions==4.8.0 | voluptuous==0.14.2 | ||||||
|  | dbus-fast==2.21.3 | ||||||
|  | typing_extensions==4.12.2 | ||||||
|  | zlib-fast==0.2.0 | ||||||
|   | |||||||
| @@ -1,16 +1,12 @@ | |||||||
| black==23.11.0 | coverage==7.5.3 | ||||||
| coverage==7.3.2 | pre-commit==3.7.1 | ||||||
| flake8-docstrings==1.7.0 | pylint==3.2.3 | ||||||
| flake8==6.1.0 |  | ||||||
| pre-commit==3.5.0 |  | ||||||
| pydocstyle==6.3.0 |  | ||||||
| pylint==3.0.2 |  | ||||||
| pytest-aiohttp==1.0.5 | pytest-aiohttp==1.0.5 | ||||||
| pytest-asyncio==0.18.3 | pytest-asyncio==0.23.6 | ||||||
| pytest-cov==4.1.0 | pytest-cov==5.0.0 | ||||||
| pytest-timeout==2.2.0 | pytest-timeout==2.3.1 | ||||||
| pytest==7.4.3 | pytest==8.2.2 | ||||||
| pyupgrade==3.15.0 | ruff==0.4.8 | ||||||
| time-machine==2.13.0 | time-machine==2.14.1 | ||||||
| typing_extensions==4.8.0 | typing_extensions==4.12.2 | ||||||
| urllib3==2.1.0 | urllib3==2.2.1 | ||||||
|   | |||||||
setup.cfg (31 deletions)
| @@ -1,31 +0,0 @@ |
| [isort] |  | ||||||
| multi_line_output = 3 |  | ||||||
| include_trailing_comma=True |  | ||||||
| force_grid_wrap=0 |  | ||||||
| line_length=88 |  | ||||||
| indent = "    " |  | ||||||
| force_sort_within_sections = true |  | ||||||
| sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER |  | ||||||
| default_section = THIRDPARTY |  | ||||||
| forced_separate = tests |  | ||||||
| combine_as_imports = true |  | ||||||
| use_parentheses = true |  | ||||||
| known_first_party = supervisor,tests |  | ||||||
|  |  | ||||||
| [flake8] |  | ||||||
| exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build |  | ||||||
| doctests = True |  | ||||||
| max-line-length = 88 |  | ||||||
| # E501: line too long |  | ||||||
| # W503: Line break occurred before a binary operator |  | ||||||
| # E203: Whitespace before ':' |  | ||||||
| # D202 No blank lines allowed after function docstring |  | ||||||
| # W504 line break after binary operator |  | ||||||
| ignore = |  | ||||||
|     E501, |  | ||||||
|     W503, |  | ||||||
|     E203, |  | ||||||
|     D202, |  | ||||||
|     W504 |  | ||||||
| per-file-ignores = |  | ||||||
|     tests/dbus_service_mocks/*.py: F821,F722 |  | ||||||
setup.py (63 changes)
| @@ -1,48 +1,27 @@ |
| """Home Assistant Supervisor setup.""" | """Home Assistant Supervisor setup.""" | ||||||
|  | from pathlib import Path | ||||||
|  | import re | ||||||
|  |  | ||||||
| from setuptools import setup | from setuptools import setup | ||||||
|  |  | ||||||
| from supervisor.const import SUPERVISOR_VERSION | RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$") | ||||||
|  |  | ||||||
|  | SUPERVISOR_DIR = Path(__file__).parent | ||||||
|  | REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt" | ||||||
|  | CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py" | ||||||
|  |  | ||||||
|  | REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8") | ||||||
|  | CONSTANTS = CONST_FILE.read_text(encoding="utf-8") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _get_supervisor_version(): | ||||||
|  |     for line in CONSTANTS.split("\n"): |
|  |         if match := RE_SUPERVISOR_VERSION.match(line): | ||||||
|  |             return match.group(1) | ||||||
|  |     return "99.9.9dev" | ||||||
|  |  | ||||||
|  |  | ||||||
| setup( | setup( | ||||||
|     name="Supervisor", |     version=_get_supervisor_version(), | ||||||
|     version=SUPERVISOR_VERSION, |     dependencies=REQUIREMENTS.split("\n"), |
|     license="BSD License", |  | ||||||
|     author="The Home Assistant Authors", |  | ||||||
|     author_email="hello@home-assistant.io", |  | ||||||
|     url="https://home-assistant.io/", |  | ||||||
|     description=("Open-source private cloud os for Home-Assistant" " based on HassOS"), |  | ||||||
|     long_description=( |  | ||||||
|         "A maintainless private cloud operator system that" |  | ||||||
|         "setup a Home-Assistant instance. Based on HassOS" |  | ||||||
|     ), |  | ||||||
|     keywords=["docker", "home-assistant", "api"], |  | ||||||
|     zip_safe=False, |  | ||||||
|     platforms="any", |  | ||||||
|     packages=[ |  | ||||||
|         "supervisor.addons", |  | ||||||
|         "supervisor.api", |  | ||||||
|         "supervisor.backups", |  | ||||||
|         "supervisor.dbus.network", |  | ||||||
|         "supervisor.dbus.network.setting", |  | ||||||
|         "supervisor.dbus", |  | ||||||
|         "supervisor.discovery.services", |  | ||||||
|         "supervisor.discovery", |  | ||||||
|         "supervisor.docker", |  | ||||||
|         "supervisor.homeassistant", |  | ||||||
|         "supervisor.host", |  | ||||||
|         "supervisor.jobs", |  | ||||||
|         "supervisor.misc", |  | ||||||
|         "supervisor.plugins", |  | ||||||
|         "supervisor.resolution.checks", |  | ||||||
|         "supervisor.resolution.evaluations", |  | ||||||
|         "supervisor.resolution.fixups", |  | ||||||
|         "supervisor.resolution", |  | ||||||
|         "supervisor.security", |  | ||||||
|         "supervisor.services.modules", |  | ||||||
|         "supervisor.services", |  | ||||||
|         "supervisor.store", |  | ||||||
|         "supervisor.utils", |  | ||||||
|         "supervisor", |  | ||||||
|     ], |  | ||||||
|     include_package_data=True, |  | ||||||
| ) | ) | ||||||
|   | |||||||
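The rewritten setup.py no longer imports supervisor.const to learn the version, which previously forced the package's runtime dependencies to be importable at build time. Instead it scans const.py line by line with a regex and falls back to a development placeholder. Note that the captured group is the raw right-hand side of the assignment, quotes included. A self-contained sketch of the extraction (the constants text here is illustrative; the real code reads supervisor/const.py from disk):

    import re

    RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

    constants = '"""Constants."""\nSUPERVISOR_VERSION = "2024.06.0"\n'

    version = "99.9.9dev"  # fallback for development checkouts
    for line in constants.split("\n"):
        if match := RE_SUPERVISOR_VERSION.match(line):
            version = match.group(1)
    print(version)  # prints "2024.06.0", including the quotes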
| @@ -5,8 +5,15 @@ import logging | |||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import sys | import sys | ||||||
|  |  | ||||||
| from supervisor import bootstrap | import zlib_fast | ||||||
| from supervisor.utils.logging import activate_log_queue_handler |  | ||||||
|  | # Enable fast zlib before importing supervisor | ||||||
|  | zlib_fast.enable() | ||||||
|  |  | ||||||
|  | from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402 | ||||||
|  | from supervisor.utils.logging import (  # pylint: disable=wrong-import-position  # noqa: E402 | ||||||
|  |     activate_log_queue_handler, | ||||||
|  | ) | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|   | |||||||
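Per its inline comment, zlib_fast.enable() must run before importing supervisor: it is designed to swap the stdlib zlib implementation for a faster one, so it has to execute before any other module imports and binds zlib. That ordering is why the supervisor imports below it carry wrong-import-position/E402 suppressions. A minimal sketch of the pattern, assuming the zlib_fast package is installed:

    import zlib_fast

    # Patch stdlib zlib *before* anything else binds it.
    zlib_fast.enable()

    import zlib  # noqa: E402  # now backed by the faster implementation

    payload = b"home assistant " * 1024
    print(len(zlib.compress(payload)))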
| @@ -1,374 +1 @@ | |||||||
| """Init file for Supervisor add-ons.""" | """Init file for Supervisor add-ons.""" | ||||||
| import asyncio |  | ||||||
| from collections.abc import Awaitable |  | ||||||
| from contextlib import suppress |  | ||||||
| import logging |  | ||||||
| import tarfile |  | ||||||
| from typing import Union |  | ||||||
|  |  | ||||||
| from ..const import AddonBoot, AddonStartup, AddonState |  | ||||||
| from ..coresys import CoreSys, CoreSysAttributes |  | ||||||
| from ..exceptions import ( |  | ||||||
|     AddonConfigurationError, |  | ||||||
|     AddonsError, |  | ||||||
|     AddonsJobError, |  | ||||||
|     AddonsNotSupportedError, |  | ||||||
|     CoreDNSError, |  | ||||||
|     DockerAPIError, |  | ||||||
|     DockerError, |  | ||||||
|     DockerNotFound, |  | ||||||
|     HassioError, |  | ||||||
|     HomeAssistantAPIError, |  | ||||||
| ) |  | ||||||
| from ..jobs.decorator import Job, JobCondition |  | ||||||
| from ..resolution.const import ContextType, IssueType, SuggestionType |  | ||||||
| from ..store.addon import AddonStore |  | ||||||
| from ..utils import check_exception_chain |  | ||||||
| from ..utils.sentry import capture_exception |  | ||||||
| from .addon import Addon |  | ||||||
| from .const import ADDON_UPDATE_CONDITIONS |  | ||||||
| from .data import AddonsData |  | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| AnyAddon = Union[Addon, AddonStore] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AddonManager(CoreSysAttributes): |  | ||||||
|     """Manage add-ons inside Supervisor.""" |  | ||||||
|  |  | ||||||
|     def __init__(self, coresys: CoreSys): |  | ||||||
|         """Initialize Docker base wrapper.""" |  | ||||||
|         self.coresys: CoreSys = coresys |  | ||||||
|         self.data: AddonsData = AddonsData(coresys) |  | ||||||
|         self.local: dict[str, Addon] = {} |  | ||||||
|         self.store: dict[str, AddonStore] = {} |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def all(self) -> list[AnyAddon]: |  | ||||||
|         """Return a list of all add-ons.""" |  | ||||||
|         addons: dict[str, AnyAddon] = {**self.store, **self.local} |  | ||||||
|         return list(addons.values()) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def installed(self) -> list[Addon]: |  | ||||||
|         """Return a list of all installed add-ons.""" |  | ||||||
|         return list(self.local.values()) |  | ||||||
|  |  | ||||||
|     def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: |  | ||||||
|         """Return an add-on from slug. |  | ||||||
|  |  | ||||||
|         Prio: |  | ||||||
|           1 - Local |  | ||||||
|           2 - Store |  | ||||||
|         """ |  | ||||||
|         if addon_slug in self.local: |  | ||||||
|             return self.local[addon_slug] |  | ||||||
|         if not local_only: |  | ||||||
|             return self.store.get(addon_slug) |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def from_token(self, token: str) -> Addon | None: |  | ||||||
|         """Return an add-on from Supervisor token.""" |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if token == addon.supervisor_token: |  | ||||||
|                 return addon |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     async def load(self) -> None: |  | ||||||
|         """Start up add-on management.""" |  | ||||||
|         tasks = [] |  | ||||||
|         for slug in self.data.system: |  | ||||||
|             addon = self.local[slug] = Addon(self.coresys, slug) |  | ||||||
|             tasks.append(self.sys_create_task(addon.load())) |  | ||||||
|  |  | ||||||
|         # Run initial tasks |  | ||||||
|         _LOGGER.info("Found %d installed add-ons", len(tasks)) |  | ||||||
|         if tasks: |  | ||||||
|             await asyncio.wait(tasks) |  | ||||||
|  |  | ||||||
|         # Sync DNS |  | ||||||
|         await self.sync_dns() |  | ||||||
|  |  | ||||||
|     async def boot(self, stage: AddonStartup) -> None: |  | ||||||
|         """Boot add-ons with mode auto.""" |  | ||||||
|         tasks: list[Addon] = [] |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if addon.boot != AddonBoot.AUTO or addon.startup != stage: |  | ||||||
|                 continue |  | ||||||
|             tasks.append(addon) |  | ||||||
|  |  | ||||||
|         # Evaluate add-ons which need to be started |  | ||||||
|         _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks)) |  | ||||||
|         if not tasks: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Start Add-ons sequential |  | ||||||
|         # avoid issue on slow IO |  | ||||||
|         # Config.wait_boot is deprecated. Until addons update with healthchecks, |  | ||||||
|         # add a sleep task for it to keep the same minimum amount of wait time |  | ||||||
|         wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] |  | ||||||
|         for addon in tasks: |  | ||||||
|             try: |  | ||||||
|                 if start_task := await addon.start(): |  | ||||||
|                     wait_boot.append(start_task) |  | ||||||
|             except AddonsError as err: |  | ||||||
|                 # Check if there is an system/user issue |  | ||||||
|                 if check_exception_chain( |  | ||||||
|                     err, (DockerAPIError, DockerNotFound, AddonConfigurationError) |  | ||||||
|                 ): |  | ||||||
|                     addon.boot = AddonBoot.MANUAL |  | ||||||
|                     addon.save_persist() |  | ||||||
|             except HassioError: |  | ||||||
|                 pass  # These are already handled |  | ||||||
|             else: |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             _LOGGER.warning("Can't start Add-on %s", addon.slug) |  | ||||||
|  |  | ||||||
|         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere |  | ||||||
|         await asyncio.gather(*wait_boot, return_exceptions=True) |  | ||||||
|  |  | ||||||
|     async def shutdown(self, stage: AddonStartup) -> None: |  | ||||||
|         """Shutdown addons.""" |  | ||||||
|         tasks: list[Addon] = [] |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if addon.state != AddonState.STARTED or addon.startup != stage: |  | ||||||
|                 continue |  | ||||||
|             tasks.append(addon) |  | ||||||
|  |  | ||||||
|         # Evaluate add-ons which need to be stopped |  | ||||||
|         _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks)) |  | ||||||
|         if not tasks: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Stop Add-ons sequential |  | ||||||
|         # avoid issue on slow IO |  | ||||||
|         for addon in tasks: |  | ||||||
|             try: |  | ||||||
|                 await addon.stop() |  | ||||||
|             except Exception as err:  # pylint: disable=broad-except |  | ||||||
|                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) |  | ||||||
|                 capture_exception(err) |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_install", |  | ||||||
|         conditions=ADDON_UPDATE_CONDITIONS, |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def install(self, slug: str) -> None: |  | ||||||
|         """Install an add-on.""" |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug in self.local: |  | ||||||
|             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning) |  | ||||||
|         store = self.store.get(slug) |  | ||||||
|  |  | ||||||
|         if not store: |  | ||||||
|             raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error) |  | ||||||
|  |  | ||||||
|         store.validate_availability() |  | ||||||
|  |  | ||||||
|         await Addon(self.coresys, slug).install() |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Add-on '%s' successfully installed", slug) |  | ||||||
|  |  | ||||||
|     async def uninstall(self, slug: str) -> None: |  | ||||||
|         """Remove an add-on.""" |  | ||||||
|         if slug not in self.local: |  | ||||||
|             _LOGGER.warning("Add-on %s is not installed", slug) |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         await self.local[slug].uninstall() |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Add-on '%s' successfully removed", slug) |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_update", |  | ||||||
|         conditions=ADDON_UPDATE_CONDITIONS, |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def update( |  | ||||||
|         self, slug: str, backup: bool | None = False |  | ||||||
|     ) -> asyncio.Task | None: |  | ||||||
|         """Update add-on. |  | ||||||
|  |  | ||||||
|         Returns a Task that completes when addon has state 'started' (see addon.start) |  | ||||||
|         if addon is started after update. Else nothing is returned. |  | ||||||
|         """ |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug not in self.local: |  | ||||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) |  | ||||||
|         addon = self.local[slug] |  | ||||||
|  |  | ||||||
|         if addon.is_detached: |  | ||||||
|             raise AddonsError( |  | ||||||
|                 f"Add-on {slug} is not available inside store", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|         store = self.store[slug] |  | ||||||
|  |  | ||||||
|         if addon.version == store.version: |  | ||||||
|             raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning) |  | ||||||
|  |  | ||||||
|         # Check if available, Maybe something have changed |  | ||||||
|         store.validate_availability() |  | ||||||
|  |  | ||||||
|         if backup: |  | ||||||
|             await self.sys_backups.do_backup_partial( |  | ||||||
|                 name=f"addon_{addon.slug}_{addon.version}", |  | ||||||
|                 homeassistant=False, |  | ||||||
|                 addons=[addon.slug], |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return await addon.update() |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_rebuild", |  | ||||||
|         conditions=[ |  | ||||||
|             JobCondition.FREE_SPACE, |  | ||||||
|             JobCondition.INTERNET_HOST, |  | ||||||
|             JobCondition.HEALTHY, |  | ||||||
|         ], |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def rebuild(self, slug: str) -> asyncio.Task | None: |  | ||||||
|         """Perform a rebuild of local build add-on. |  | ||||||
|  |  | ||||||
|         Returns a Task that completes when addon has state 'started' (see addon.start) |  | ||||||
|         if addon is started after rebuild. Else nothing is returned. |  | ||||||
|         """ |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug not in self.local: |  | ||||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) |  | ||||||
|         addon = self.local[slug] |  | ||||||
|  |  | ||||||
|         if addon.is_detached: |  | ||||||
|             raise AddonsError( |  | ||||||
|                 f"Add-on {slug} is not available inside store", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|         store = self.store[slug] |  | ||||||
|  |  | ||||||
|         # Check if a rebuild is possible now |  | ||||||
|         if addon.version != store.version: |  | ||||||
|             raise AddonsError( |  | ||||||
|                 "Version changed, use Update instead Rebuild", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|         if not addon.need_build: |  | ||||||
|             raise AddonsNotSupportedError( |  | ||||||
|                 "Can't rebuild a image based add-on", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return await addon.rebuild() |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_restore", |  | ||||||
|         conditions=[ |  | ||||||
|             JobCondition.FREE_SPACE, |  | ||||||
|             JobCondition.INTERNET_HOST, |  | ||||||
|             JobCondition.HEALTHY, |  | ||||||
|         ], |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def restore( |  | ||||||
|         self, slug: str, tar_file: tarfile.TarFile |  | ||||||
|     ) -> asyncio.Task | None: |  | ||||||
|         """Restore state of an add-on. |  | ||||||
|  |  | ||||||
|         Returns a Task that completes when addon has state 'started' (see addon.start) |  | ||||||
|         if addon is started after restore. Else nothing is returned. |  | ||||||
|         """ |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug not in self.local: |  | ||||||
|             _LOGGER.debug("Add-on %s is not local available for restore", slug) |  | ||||||
|             addon = Addon(self.coresys, slug) |  | ||||||
|             had_ingress = False |  | ||||||
|         else: |  | ||||||
|             _LOGGER.debug("Add-on %s is local available for restore", slug) |  | ||||||
|             addon = self.local[slug] |  | ||||||
|             had_ingress = addon.ingress_panel |  | ||||||
|  |  | ||||||
|         wait_for_start = await addon.restore(tar_file) |  | ||||||
|  |  | ||||||
|         # Check if new |  | ||||||
|         if slug not in self.local: |  | ||||||
|             _LOGGER.info("Detect new Add-on after restore %s", slug) |  | ||||||
|             self.local[slug] = addon |  | ||||||
|  |  | ||||||
|         # Update ingress |  | ||||||
|         if had_ingress != addon.ingress_panel: |  | ||||||
|             await self.sys_ingress.reload() |  | ||||||
|             with suppress(HomeAssistantAPIError): |  | ||||||
|                 await self.sys_ingress.update_hass_panel(addon) |  | ||||||
|  |  | ||||||
|         return wait_for_start |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_repair", |  | ||||||
|         conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST], |  | ||||||
|     ) |  | ||||||
|     async def repair(self) -> None: |  | ||||||
|         """Repair local add-ons.""" |  | ||||||
|         needs_repair: list[Addon] = [] |  | ||||||
|  |  | ||||||
|         # Evaluate Add-ons to repair |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if await addon.instance.exists(): |  | ||||||
|                 continue |  | ||||||
|             needs_repair.append(addon) |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Found %d add-ons to repair", len(needs_repair)) |  | ||||||
|         if not needs_repair: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         for addon in needs_repair: |  | ||||||
|             _LOGGER.info("Repairing for add-on: %s", addon.slug) |  | ||||||
|             with suppress(DockerError, KeyError): |  | ||||||
|                 # Need pull a image again |  | ||||||
|                 if not addon.need_build: |  | ||||||
|                     await addon.instance.install(addon.version, addon.image) |  | ||||||
|                     continue |  | ||||||
|  |  | ||||||
|                 # Need local lookup |  | ||||||
|                 if addon.need_build and not addon.is_detached: |  | ||||||
|                     store = self.store[addon.slug] |  | ||||||
|                     # If this add-on is available for rebuild |  | ||||||
|                     if addon.version == store.version: |  | ||||||
|                         await addon.instance.install(addon.version, addon.image) |  | ||||||
|                         continue |  | ||||||
|  |  | ||||||
|             _LOGGER.error("Can't repair %s", addon.slug) |  | ||||||
|             with suppress(AddonsError): |  | ||||||
|                 await self.uninstall(addon.slug) |  | ||||||
|  |  | ||||||
|     async def sync_dns(self) -> None: |  | ||||||
|         """Sync add-ons DNS names.""" |  | ||||||
|         # Update hosts |  | ||||||
|         add_host_coros: list[Awaitable[None]] = [] |  | ||||||
|         for addon in self.installed: |  | ||||||
|             try: |  | ||||||
|                 if not await addon.instance.is_running(): |  | ||||||
|                     continue |  | ||||||
|             except DockerError as err: |  | ||||||
|                 _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err) |  | ||||||
|                 self.sys_resolution.create_issue( |  | ||||||
|                     IssueType.CORRUPT_DOCKER, |  | ||||||
|                     ContextType.ADDON, |  | ||||||
|                     reference=addon.slug, |  | ||||||
|                     suggestions=[SuggestionType.EXECUTE_REPAIR], |  | ||||||
|                 ) |  | ||||||
|                 capture_exception(err) |  | ||||||
|             else: |  | ||||||
|                 add_host_coros.append( |  | ||||||
|                     self.sys_plugins.dns.add_host( |  | ||||||
|                         ipv4=addon.ip_address, names=[addon.hostname], write=False |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         await asyncio.gather(*add_host_coros) |  | ||||||
|  |  | ||||||
|         # Write hosts files |  | ||||||
|         with suppress(CoreDNSError): |  | ||||||
|             await self.sys_plugins.dns.write_hosts() |  | ||||||
|   | |||||||
| @@ -3,6 +3,8 @@ import asyncio | |||||||
| from collections.abc import Awaitable | from collections.abc import Awaitable | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
| from copy import deepcopy | from copy import deepcopy | ||||||
|  | from datetime import datetime | ||||||
|  | import errno | ||||||
| from ipaddress import IPv4Address | from ipaddress import IPv4Address | ||||||
| import logging | import logging | ||||||
| from pathlib import Path, PurePath | from pathlib import Path, PurePath | ||||||
| @@ -14,11 +16,14 @@ from tempfile import TemporaryDirectory | |||||||
| from typing import Any, Final | from typing import Any, Final | ||||||
|  |  | ||||||
| import aiohttp | import aiohttp | ||||||
|  | from awesomeversion import AwesomeVersionCompareException | ||||||
| from deepmerge import Merger | from deepmerge import Merger | ||||||
| from securetar import atomic_contents_add, secure_path | from securetar import atomic_contents_add, secure_path | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
| from voluptuous.humanize import humanize_error | from voluptuous.humanize import humanize_error | ||||||
|  |  | ||||||
|  | from supervisor.utils.dt import utc_from_timestamp | ||||||
|  |  | ||||||
| from ..bus import EventListener | from ..bus import EventListener | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     ATTR_ACCESS_TOKEN, |     ATTR_ACCESS_TOKEN, | ||||||
| @@ -45,9 +50,9 @@ from ..const import ( | |||||||
|     ATTR_USER, |     ATTR_USER, | ||||||
|     ATTR_UUID, |     ATTR_UUID, | ||||||
|     ATTR_VERSION, |     ATTR_VERSION, | ||||||
|  |     ATTR_VERSION_TIMESTAMP, | ||||||
|     ATTR_WATCHDOG, |     ATTR_WATCHDOG, | ||||||
|     DNS_SUFFIX, |     DNS_SUFFIX, | ||||||
|     MAP_ADDON_CONFIG, |  | ||||||
|     AddonBoot, |     AddonBoot, | ||||||
|     AddonStartup, |     AddonStartup, | ||||||
|     AddonState, |     AddonState, | ||||||
| @@ -72,6 +77,7 @@ from ..hardware.data import Device | |||||||
| from ..homeassistant.const import WSEvent, WSType | from ..homeassistant.const import WSEvent, WSType | ||||||
| from ..jobs.const import JobExecutionLimit | from ..jobs.const import JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
| from ..store.addon import AddonStore | from ..store.addon import AddonStore | ||||||
| from ..utils import check_port | from ..utils import check_port | ||||||
| from ..utils.apparmor import adjust_profile | from ..utils.apparmor import adjust_profile | ||||||
| @@ -83,6 +89,7 @@ from .const import ( | |||||||
|     WATCHDOG_THROTTLE_MAX_CALLS, |     WATCHDOG_THROTTLE_MAX_CALLS, | ||||||
|     WATCHDOG_THROTTLE_PERIOD, |     WATCHDOG_THROTTLE_PERIOD, | ||||||
|     AddonBackupMode, |     AddonBackupMode, | ||||||
|  |     MappingType, | ||||||
| ) | ) | ||||||
| from .model import AddonModel, Data | from .model import AddonModel, Data | ||||||
| from .options import AddonOptions | from .options import AddonOptions | ||||||
| @@ -173,6 +180,9 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|     async def load(self) -> None: |     async def load(self) -> None: | ||||||
|         """Async initialize of object.""" |         """Async initialize of object.""" | ||||||
|  |         if self.is_detached: | ||||||
|  |             await super().refresh_path_cache() | ||||||
|  |  | ||||||
|         self._listeners.append( |         self._listeners.append( | ||||||
|             self.sys_bus.register_event( |             self.sys_bus.register_event( | ||||||
|                 BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed |                 BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed | ||||||
| @@ -184,9 +194,21 @@ class Addon(AddonModel): | |||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         with suppress(DockerError): |         await self._check_ingress_port() | ||||||
|  |         default_image = self._image(self.data) | ||||||
|  |         try: | ||||||
|             await self.instance.attach(version=self.version) |             await self.instance.attach(version=self.version) | ||||||
|  |  | ||||||
|  |             # Ensure we are using the correct image for this system | ||||||
|  |             await self.instance.check_image(self.version, default_image, self.arch) | ||||||
|  |         except DockerError: | ||||||
|  |             _LOGGER.info("No %s addon Docker image %s found", self.slug, self.image) | ||||||
|  |             with suppress(DockerError): | ||||||
|  |                 await self.instance.install(self.version, default_image, arch=self.arch) | ||||||
|  |  | ||||||
|  |         self.persist[ATTR_IMAGE] = default_image | ||||||
|  |         self.save_persist() | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def ip_address(self) -> IPv4Address: |     def ip_address(self) -> IPv4Address: | ||||||
|         """Return IP of add-on instance.""" |         """Return IP of add-on instance.""" | ||||||
| @@ -222,6 +244,34 @@ class Addon(AddonModel): | |||||||
|         """Return True if add-on is detached.""" |         """Return True if add-on is detached.""" | ||||||
|         return self.slug not in self.sys_store.data.addons |         return self.slug not in self.sys_store.data.addons | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def with_icon(self) -> bool: | ||||||
|  |         """Return True if an icon exists.""" | ||||||
|  |         if self.is_detached: | ||||||
|  |             return super().with_icon | ||||||
|  |         return self.addon_store.with_icon | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def with_logo(self) -> bool: | ||||||
|  |         """Return True if a logo exists.""" | ||||||
|  |         if self.is_detached: | ||||||
|  |             return super().with_logo | ||||||
|  |         return self.addon_store.with_logo | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def with_changelog(self) -> bool: | ||||||
|  |         """Return True if a changelog exists.""" | ||||||
|  |         if self.is_detached: | ||||||
|  |             return super().with_changelog | ||||||
|  |         return self.addon_store.with_changelog | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def with_documentation(self) -> bool: | ||||||
|  |         """Return True if a documentation exists.""" | ||||||
|  |         if self.is_detached: | ||||||
|  |             return super().with_documentation | ||||||
|  |         return self.addon_store.with_documentation | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def available(self) -> bool: |     def available(self) -> bool: | ||||||
|         """Return True if this add-on is available on this platform.""" |         """Return True if this add-on is available on this platform.""" | ||||||
| @@ -235,9 +285,13 @@ class Addon(AddonModel): | |||||||
|     @property |     @property | ||||||
|     def need_update(self) -> bool: |     def need_update(self) -> bool: | ||||||
|         """Return True if an update is available.""" |         """Return True if an update is available.""" | ||||||
|         if self.is_detached: |         if self.is_detached or self.version == self.latest_version: | ||||||
|             return False |             return False | ||||||
|         return self.version != self.latest_version |  | ||||||
|  |         with suppress(AddonsNotSupportedError): | ||||||
|  |             self._validate_availability(self.data_store) | ||||||
|  |             return True | ||||||
|  |         return False | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def dns(self) -> list[str]: |     def dns(self) -> list[str]: | ||||||
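Folding the availability check into need_update means a store version that cannot run on this system (unsupported architecture or machine, too-old Supervisor) no longer shows up as a pending update. Caller-side, a minimal sketch assuming the manager's installed list introduced later in this diff:

    updatable = [addon for addon in manager.installed if addon.need_update]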
| @@ -276,6 +330,28 @@ class Addon(AddonModel): | |||||||
|         """Set auto update.""" |         """Set auto update.""" | ||||||
|         self.persist[ATTR_AUTO_UPDATE] = value |         self.persist[ATTR_AUTO_UPDATE] = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def auto_update_available(self) -> bool: | ||||||
|  |         """Return if it is safe to auto update addon.""" | ||||||
|  |         if not self.need_update or not self.auto_update: | ||||||
|  |             return False | ||||||
|  |  | ||||||
|  |         for version in self.breaking_versions: | ||||||
|  |             try: | ||||||
|  |                 # Updates always go to the latest version; if installed is | ||||||
|  |                 # below a breaking version, the update would cross it | ||||||
|  |                 if self.version < version: | ||||||
|  |                     return False | ||||||
|  |             except AwesomeVersionCompareException: | ||||||
|  |                 # A changed version scheme can make the comparison raise. | ||||||
|  |                 # If latest version >= breaking version, assume the update | ||||||
|  |                 # crosses it as part of the scheme change. | ||||||
|  |                 # If both comparisons raise, ignore it as it's in the past. | ||||||
|  |                 with suppress(AwesomeVersionCompareException): | ||||||
|  |                     if self.latest_version >= version: | ||||||
|  |                         return False | ||||||
|  |  | ||||||
|  |         return True | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def watchdog(self) -> bool: |     def watchdog(self) -> bool: | ||||||
|         """Return True if watchdog is enable.""" |         """Return True if watchdog is enable.""" | ||||||
| @@ -318,6 +394,11 @@ class Addon(AddonModel): | |||||||
|         """Return version of add-on.""" |         """Return version of add-on.""" | ||||||
|         return self.data_store[ATTR_VERSION] |         return self.data_store[ATTR_VERSION] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def latest_version_timestamp(self) -> datetime: | ||||||
|  |         """Return when latest version was first seen.""" | ||||||
|  |         return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP]) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def protected(self) -> bool: |     def protected(self) -> bool: | ||||||
|         """Return if add-on is in protected mode.""" |         """Return if add-on is in protected mode.""" | ||||||
| @@ -395,7 +476,7 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         port = self.data[ATTR_INGRESS_PORT] |         port = self.data[ATTR_INGRESS_PORT] | ||||||
|         if port == 0: |         if port == 0: | ||||||
|             return self.sys_ingress.get_dynamic_port(self.slug) |             raise RuntimeError(f"No port set for add-on {self.slug}") | ||||||
|         return port |         return port | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -464,7 +545,7 @@ class Addon(AddonModel): | |||||||
|     @property |     @property | ||||||
|     def addon_config_used(self) -> bool: |     def addon_config_used(self) -> bool: | ||||||
|         """Add-on is using its public config folder.""" |         """Add-on is using its public config folder.""" | ||||||
|         return MAP_ADDON_CONFIG in self.map_volumes |         return MappingType.ADDON_CONFIG in self.map_volumes | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def path_config(self) -> Path: |     def path_config(self) -> Path: | ||||||
| @@ -539,7 +620,7 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         # TCP monitoring |         # TCP monitoring | ||||||
|         if s_prefix == "tcp": |         if s_prefix == "tcp": | ||||||
|             return await self.sys_run_in_executor(check_port, self.ip_address, port) |             return await check_port(self.ip_address, port) | ||||||
|  |  | ||||||
|         # lookup the correct protocol from config |         # lookup the correct protocol from config | ||||||
|         if t_proto: |         if t_proto: | ||||||
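Dropping the executor wrapper implies check_port() in ..utils became a coroutine. A sketch of what such an async TCP probe might look like (the real helper's signature may differ):

    import asyncio
    from ipaddress import IPv4Address

    async def check_port(address: IPv4Address, port: int, timeout: float = 0.5) -> bool:
        """Probe a TCP port without blocking the event loop."""
        try:
            _reader, writer = await asyncio.wait_for(
                asyncio.open_connection(str(address), port), timeout
            )
        except (OSError, asyncio.TimeoutError):
            return False
        writer.close()
        await writer.wait_closed()
        return True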
| @@ -602,6 +683,16 @@ class Addon(AddonModel): | |||||||
|             _LOGGER.info("Removing add-on data folder %s", self.path_data) |             _LOGGER.info("Removing add-on data folder %s", self.path_data) | ||||||
|             await remove_data(self.path_data) |             await remove_data(self.path_data) | ||||||
|  |  | ||||||
|  |     async def _check_ingress_port(self): | ||||||
|  |         """Assign a ingress port if dynamic port selection is used.""" | ||||||
|  |         if not self.with_ingress: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         if self.data[ATTR_INGRESS_PORT] == 0: | ||||||
|  |             self.data[ATTR_INGRESS_PORT] = await self.sys_ingress.get_dynamic_port( | ||||||
|  |                 self.slug | ||||||
|  |             ) | ||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
|         name="addon_install", |         name="addon_install", | ||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
| @@ -642,7 +733,7 @@ class Addon(AddonModel): | |||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|         on_condition=AddonsJobError, |         on_condition=AddonsJobError, | ||||||
|     ) |     ) | ||||||
|     async def uninstall(self) -> None: |     async def uninstall(self, *, remove_config: bool) -> None: | ||||||
|         """Uninstall and cleanup this addon.""" |         """Uninstall and cleanup this addon.""" | ||||||
|         try: |         try: | ||||||
|             await self.instance.remove() |             await self.instance.remove() | ||||||
| @@ -653,6 +744,10 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         await self.unload() |         await self.unload() | ||||||
|  |  | ||||||
|  |         # Remove config if present and requested | ||||||
|  |         if self.addon_config_used and remove_config: | ||||||
|  |             await remove_data(self.path_config) | ||||||
|  |  | ||||||
|         # Cleanup audio settings |         # Cleanup audio settings | ||||||
|         if self.path_pulse.exists(): |         if self.path_pulse.exists(): | ||||||
|             with suppress(OSError): |             with suppress(OSError): | ||||||
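Because remove_config is keyword-only, callers must opt in explicitly before user data in the add-on's public config folder is deleted. With a hypothetical slug:

    # Default: keep the add-on's public config folder.
    await self.sys_addons.uninstall("local_example")

    # Opt in to deleting it as well.
    await self.sys_addons.uninstall("local_example", remove_config=True)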
| @@ -705,7 +800,7 @@ class Addon(AddonModel): | |||||||
|         store = self.addon_store.clone() |         store = self.addon_store.clone() | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             await self.instance.update(store.version, store.image) |             await self.instance.update(store.version, store.image, arch=self.arch) | ||||||
|         except DockerError as err: |         except DockerError as err: | ||||||
|             raise AddonsError() from err |             raise AddonsError() from err | ||||||
|  |  | ||||||
| @@ -716,6 +811,7 @@ class Addon(AddonModel): | |||||||
|         try: |         try: | ||||||
|             _LOGGER.info("Add-on '%s' successfully updated", self.slug) |             _LOGGER.info("Add-on '%s' successfully updated", self.slug) | ||||||
|             self.sys_addons.data.update(store) |             self.sys_addons.data.update(store) | ||||||
|  |             await self._check_ingress_port() | ||||||
|  |  | ||||||
|             # Cleanup |             # Cleanup | ||||||
|             with suppress(DockerError): |             with suppress(DockerError): | ||||||
| @@ -756,6 +852,7 @@ class Addon(AddonModel): | |||||||
|                 raise AddonsError() from err |                 raise AddonsError() from err | ||||||
|  |  | ||||||
|             self.sys_addons.data.update(self.addon_store) |             self.sys_addons.data.update(self.addon_store) | ||||||
|  |             await self._check_ingress_port() | ||||||
|             _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug) |             _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug) | ||||||
|  |  | ||||||
|         finally: |         finally: | ||||||
| @@ -781,6 +878,8 @@ class Addon(AddonModel): | |||||||
|         try: |         try: | ||||||
|             self.path_pulse.write_text(pulse_config, encoding="utf-8") |             self.path_pulse.write_text(pulse_config, encoding="utf-8") | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error( |             _LOGGER.error( | ||||||
|                 "Add-on %s can't write pulse/client.config: %s", self.slug, err |                 "Add-on %s can't write pulse/client.config: %s", self.slug, err | ||||||
|             ) |             ) | ||||||
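EBADMSG from a plain file write typically surfaces from a damaged filesystem (for example corrupted flash storage) rather than an application bug, which is presumably why it marks the whole system unhealthy instead of only logging. The core pattern, with hypothetical names:

    import errno

    try:
        target_path.write_text(payload, encoding="utf-8")  # hypothetical path/data
    except OSError as err:
        if err.errno == errno.EBADMSG:
            mark_unhealthy()  # stand-in for sys_resolution.unhealthy = ...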
| @@ -1139,7 +1238,11 @@ class Addon(AddonModel): | |||||||
|             def _extract_tarfile(): |             def _extract_tarfile(): | ||||||
|                 """Extract tar backup.""" |                 """Extract tar backup.""" | ||||||
|                 with tar_file as backup: |                 with tar_file as backup: | ||||||
|                     backup.extractall(path=Path(temp), members=secure_path(backup)) |                     backup.extractall( | ||||||
|  |                         path=Path(temp), | ||||||
|  |                         members=secure_path(backup), | ||||||
|  |                         filter="fully_trusted", | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 await self.sys_run_in_executor(_extract_tarfile) |                 await self.sys_run_in_executor(_extract_tarfile) | ||||||
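Python 3.12 implements PEP 706 and warns when extractall() runs without an explicit extraction filter (the default is slated to become the stricter "data" filter). Passing filter="fully_trusted" keeps the pre-PEP behavior, which is defensible here because members are already sanitized through secure_path(). A standalone sketch with a placeholder path:

    import tarfile

    # Sanitizing of members (secure_path-style) is assumed to happen upstream.
    with tarfile.open("backup.tar") as backup:
        backup.extractall(path="/tmp/restore", filter="fully_trusted")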
| @@ -1193,12 +1296,15 @@ class Addon(AddonModel): | |||||||
|                             await self.instance.import_image(image_file) |                             await self.instance.import_image(image_file) | ||||||
|                     else: |                     else: | ||||||
|                         with suppress(DockerError): |                         with suppress(DockerError): | ||||||
|                             await self.instance.install(version, restore_image) |                             await self.instance.install( | ||||||
|  |                                 version, restore_image, self.arch | ||||||
|  |                             ) | ||||||
|                             await self.instance.cleanup() |                             await self.instance.cleanup() | ||||||
|                 elif self.instance.version != version or self.legacy: |                 elif self.instance.version != version or self.legacy: | ||||||
|                     _LOGGER.info("Restore/Update of image for addon %s", self.slug) |                     _LOGGER.info("Restore/Update of image for addon %s", self.slug) | ||||||
|                     with suppress(DockerError): |                     with suppress(DockerError): | ||||||
|                         await self.instance.update(version, restore_image) |                         await self.instance.update(version, restore_image, self.arch) | ||||||
|  |                 await self._check_ingress_port() | ||||||
|  |  | ||||||
|                 # Restore data and config |                 # Restore data and config | ||||||
|                 def _restore_data(): |                 def _restore_data(): | ||||||
| @@ -1241,11 +1347,11 @@ class Addon(AddonModel): | |||||||
|                         ) |                         ) | ||||||
|                         raise AddonsError() from err |                         raise AddonsError() from err | ||||||
|  |  | ||||||
|  |             finally: | ||||||
|                 # Is add-on loaded |                 # Is add-on loaded | ||||||
|                 if not self.loaded: |                 if not self.loaded: | ||||||
|                     await self.load() |                     await self.load() | ||||||
|  |  | ||||||
|             finally: |  | ||||||
|                 # Run add-on |                 # Run add-on | ||||||
|                 if data[ATTR_STATE] == AddonState.STARTED: |                 if data[ATTR_STATE] == AddonState.STARTED: | ||||||
|                     wait_for_start = await self.start() |                     wait_for_start = await self.start() | ||||||
| @@ -1339,3 +1445,9 @@ class Addon(AddonModel): | |||||||
|             ContainerState.UNHEALTHY, |             ContainerState.UNHEALTHY, | ||||||
|         ]: |         ]: | ||||||
|             await self._restart_after_problem(event.state) |             await self._restart_after_problem(event.state) | ||||||
|  |  | ||||||
|  |     def refresh_path_cache(self) -> Awaitable[None]: | ||||||
|  |         """Refresh cache of existing paths.""" | ||||||
|  |         if self.is_detached: | ||||||
|  |             return super().refresh_path_cache() | ||||||
|  |         return self.addon_store.refresh_path_cache() | ||||||
|   | |||||||
| @@ -102,11 +102,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes): | |||||||
|         except HassioArchNotFound: |         except HassioArchNotFound: | ||||||
|             return False |             return False | ||||||
|  |  | ||||||
|     def get_docker_args(self, version: AwesomeVersion): |     def get_docker_args(self, version: AwesomeVersion, image: str | None = None): | ||||||
|         """Create a dict with Docker build arguments.""" |         """Create a dict with Docker build arguments.""" | ||||||
|         args = { |         args = { | ||||||
|             "path": str(self.addon.path_location), |             "path": str(self.addon.path_location), | ||||||
|             "tag": f"{self.addon.image}:{version!s}", |             "tag": f"{image or self.addon.image}:{version!s}", | ||||||
|             "dockerfile": str(self.dockerfile), |             "dockerfile": str(self.dockerfile), | ||||||
|             "pull": True, |             "pull": True, | ||||||
|             "forcerm": not self.sys_dev, |             "forcerm": not self.sys_dev, | ||||||
|   | |||||||
supervisor/addons/configuration.py (new file, 11 lines)
							| @@ -0,0 +1,11 @@ | |||||||
|  | """Confgiuration Objects for Addon Config.""" | ||||||
|  |  | ||||||
|  | from dataclasses import dataclass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @dataclass(slots=True) | ||||||
|  | class FolderMapping: | ||||||
|  |     """Represent folder mapping configuration.""" | ||||||
|  |  | ||||||
|  |     path: str | None | ||||||
|  |     read_only: bool | ||||||
| @@ -12,8 +12,26 @@ class AddonBackupMode(StrEnum): | |||||||
|     COLD = "cold" |     COLD = "cold" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class MappingType(StrEnum): | ||||||
|  |     """Mapping type of an Add-on Folder.""" | ||||||
|  |  | ||||||
|  |     DATA = "data" | ||||||
|  |     CONFIG = "config" | ||||||
|  |     SSL = "ssl" | ||||||
|  |     ADDONS = "addons" | ||||||
|  |     BACKUP = "backup" | ||||||
|  |     SHARE = "share" | ||||||
|  |     MEDIA = "media" | ||||||
|  |     HOMEASSISTANT_CONFIG = "homeassistant_config" | ||||||
|  |     ALL_ADDON_CONFIGS = "all_addon_configs" | ||||||
|  |     ADDON_CONFIG = "addon_config" | ||||||
|  |  | ||||||
|  |  | ||||||
| ATTR_BACKUP = "backup" | ATTR_BACKUP = "backup" | ||||||
|  | ATTR_BREAKING_VERSIONS = "breaking_versions" | ||||||
| ATTR_CODENOTARY = "codenotary" | ATTR_CODENOTARY = "codenotary" | ||||||
|  | ATTR_READ_ONLY = "read_only" | ||||||
|  | ATTR_PATH = "path" | ||||||
| WATCHDOG_RETRY_SECONDS = 10 | WATCHDOG_RETRY_SECONDS = 10 | ||||||
| WATCHDOG_MAX_ATTEMPTS = 5 | WATCHDOG_MAX_ATTEMPTS = 5 | ||||||
| WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30) | WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30) | ||||||
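With MappingType and FolderMapping in place, map_volumes (changed later in this diff) yields structured objects instead of a {name: read_only} dict. For a map entry {"type": "addon_config", "read_only": false}, using the module paths introduced in this diff:

    from supervisor.addons.configuration import FolderMapping
    from supervisor.addons.const import MappingType

    volumes = {MappingType.ADDON_CONFIG: FolderMapping(path=None, read_only=False)}
    assert volumes[MappingType.ADDON_CONFIG].read_only is False
    assert MappingType.ADDON_CONFIG == "addon_config"  # StrEnum compares to str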
|   | |||||||
supervisor/addons/manager.py (new file, 379 lines)
							| @@ -0,0 +1,379 @@ | |||||||
|  | """Supervisor add-on manager.""" | ||||||
|  | import asyncio | ||||||
|  | from collections.abc import Awaitable | ||||||
|  | from contextlib import suppress | ||||||
|  | import logging | ||||||
|  | import tarfile | ||||||
|  | from typing import Union | ||||||
|  |  | ||||||
|  | from ..const import AddonBoot, AddonStartup, AddonState | ||||||
|  | from ..coresys import CoreSys, CoreSysAttributes | ||||||
|  | from ..exceptions import ( | ||||||
|  |     AddonConfigurationError, | ||||||
|  |     AddonsError, | ||||||
|  |     AddonsJobError, | ||||||
|  |     AddonsNotSupportedError, | ||||||
|  |     CoreDNSError, | ||||||
|  |     DockerAPIError, | ||||||
|  |     DockerError, | ||||||
|  |     DockerNotFound, | ||||||
|  |     HassioError, | ||||||
|  |     HomeAssistantAPIError, | ||||||
|  | ) | ||||||
|  | from ..jobs.decorator import Job, JobCondition | ||||||
|  | from ..resolution.const import ContextType, IssueType, SuggestionType | ||||||
|  | from ..store.addon import AddonStore | ||||||
|  | from ..utils import check_exception_chain | ||||||
|  | from ..utils.sentry import capture_exception | ||||||
|  | from .addon import Addon | ||||||
|  | from .const import ADDON_UPDATE_CONDITIONS | ||||||
|  | from .data import AddonsData | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | AnyAddon = Union[Addon, AddonStore] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AddonManager(CoreSysAttributes): | ||||||
|  |     """Manage add-ons inside Supervisor.""" | ||||||
|  |  | ||||||
|  |     def __init__(self, coresys: CoreSys): | ||||||
|  |         """Initialize Docker base wrapper.""" | ||||||
|  |         self.coresys: CoreSys = coresys | ||||||
|  |         self.data: AddonsData = AddonsData(coresys) | ||||||
|  |         self.local: dict[str, Addon] = {} | ||||||
|  |         self.store: dict[str, AddonStore] = {} | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def all(self) -> list[AnyAddon]: | ||||||
|  |         """Return a list of all add-ons.""" | ||||||
|  |         addons: dict[str, AnyAddon] = {**self.store, **self.local} | ||||||
|  |         return list(addons.values()) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def installed(self) -> list[Addon]: | ||||||
|  |         """Return a list of all installed add-ons.""" | ||||||
|  |         return list(self.local.values()) | ||||||
|  |  | ||||||
|  |     def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: | ||||||
|  |         """Return an add-on from slug. | ||||||
|  |  | ||||||
|  |         Prio: | ||||||
|  |           1 - Local | ||||||
|  |           2 - Store | ||||||
|  |         """ | ||||||
|  |         if addon_slug in self.local: | ||||||
|  |             return self.local[addon_slug] | ||||||
|  |         if not local_only: | ||||||
|  |             return self.store.get(addon_slug) | ||||||
|  |         return None | ||||||
|  |  | ||||||
|  |     def from_token(self, token: str) -> Addon | None: | ||||||
|  |         """Return an add-on from Supervisor token.""" | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if token == addon.supervisor_token: | ||||||
|  |                 return addon | ||||||
|  |         return None | ||||||
|  |  | ||||||
|  |     async def load(self) -> None: | ||||||
|  |         """Start up add-on management.""" | ||||||
|  |         # Refresh cache for all store addons | ||||||
|  |         tasks: list[Awaitable[None]] = [ | ||||||
|  |             store.refresh_path_cache() for store in self.store.values() | ||||||
|  |         ] | ||||||
|  |  | ||||||
|  |         # Load all installed addons | ||||||
|  |         for slug in self.data.system: | ||||||
|  |             addon = self.local[slug] = Addon(self.coresys, slug) | ||||||
|  |             tasks.append(addon.load()) | ||||||
|  |  | ||||||
|  |         # Run initial tasks | ||||||
|  |         _LOGGER.info("Found %d installed add-ons", len(self.data.system)) | ||||||
|  |         if tasks: | ||||||
|  |             await asyncio.gather(*tasks) | ||||||
|  |  | ||||||
|  |         # Sync DNS | ||||||
|  |         await self.sync_dns() | ||||||
|  |  | ||||||
|  |     async def boot(self, stage: AddonStartup) -> None: | ||||||
|  |         """Boot add-ons with mode auto.""" | ||||||
|  |         tasks: list[Addon] = [] | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if addon.boot != AddonBoot.AUTO or addon.startup != stage: | ||||||
|  |                 continue | ||||||
|  |             tasks.append(addon) | ||||||
|  |  | ||||||
|  |         # Evaluate add-ons which need to be started | ||||||
|  |         _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks)) | ||||||
|  |         if not tasks: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Start add-ons sequentially to avoid issues on slow IO | ||||||
|  |         # Config.wait_boot is deprecated. Until addons update with healthchecks, | ||||||
|  |         # add a sleep task for it to keep the same minimum amount of wait time | ||||||
|  |         wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] | ||||||
|  |         for addon in tasks: | ||||||
|  |             try: | ||||||
|  |                 if start_task := await addon.start(): | ||||||
|  |                     wait_boot.append(start_task) | ||||||
|  |             except AddonsError as err: | ||||||
|  |                 # Check if there is a system/user issue | ||||||
|  |                 if check_exception_chain( | ||||||
|  |                     err, (DockerAPIError, DockerNotFound, AddonConfigurationError) | ||||||
|  |                 ): | ||||||
|  |                     addon.boot = AddonBoot.MANUAL | ||||||
|  |                     addon.save_persist() | ||||||
|  |             except HassioError: | ||||||
|  |                 pass  # These are already handled | ||||||
|  |             else: | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             _LOGGER.warning("Can't start Add-on %s", addon.slug) | ||||||
|  |  | ||||||
|  |         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere | ||||||
|  |         await asyncio.gather(*wait_boot, return_exceptions=True) | ||||||
|  |  | ||||||
|  |     async def shutdown(self, stage: AddonStartup) -> None: | ||||||
|  |         """Shutdown addons.""" | ||||||
|  |         tasks: list[Addon] = [] | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if addon.state != AddonState.STARTED or addon.startup != stage: | ||||||
|  |                 continue | ||||||
|  |             tasks.append(addon) | ||||||
|  |  | ||||||
|  |         # Evaluate add-ons which need to be stopped | ||||||
|  |         _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks)) | ||||||
|  |         if not tasks: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Stop add-ons sequentially to avoid issues on slow IO | ||||||
|  |         for addon in tasks: | ||||||
|  |             try: | ||||||
|  |                 await addon.stop() | ||||||
|  |             except Exception as err:  # pylint: disable=broad-except | ||||||
|  |                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) | ||||||
|  |                 capture_exception(err) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_install", | ||||||
|  |         conditions=ADDON_UPDATE_CONDITIONS, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def install(self, slug: str) -> None: | ||||||
|  |         """Install an add-on.""" | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug in self.local: | ||||||
|  |             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning) | ||||||
|  |         store = self.store.get(slug) | ||||||
|  |  | ||||||
|  |         if not store: | ||||||
|  |             raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error) | ||||||
|  |  | ||||||
|  |         store.validate_availability() | ||||||
|  |  | ||||||
|  |         await Addon(self.coresys, slug).install() | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Add-on '%s' successfully installed", slug) | ||||||
|  |  | ||||||
|  |     async def uninstall(self, slug: str, *, remove_config: bool = False) -> None: | ||||||
|  |         """Remove an add-on.""" | ||||||
|  |         if slug not in self.local: | ||||||
|  |             _LOGGER.warning("Add-on %s is not installed", slug) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         await self.local[slug].uninstall(remove_config=remove_config) | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Add-on '%s' successfully removed", slug) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_update", | ||||||
|  |         conditions=ADDON_UPDATE_CONDITIONS, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def update( | ||||||
|  |         self, slug: str, backup: bool | None = False | ||||||
|  |     ) -> asyncio.Task | None: | ||||||
|  |         """Update add-on. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||||
|  |         if addon is started after update. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug not in self.local: | ||||||
|  |             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||||
|  |         addon = self.local[slug] | ||||||
|  |  | ||||||
|  |         if addon.is_detached: | ||||||
|  |             raise AddonsError( | ||||||
|  |                 f"Add-on {slug} is not available inside store", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |         store = self.store[slug] | ||||||
|  |  | ||||||
|  |         if addon.version == store.version: | ||||||
|  |             raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning) | ||||||
|  |  | ||||||
|  |         # Check if still available; maybe something has changed | ||||||
|  |         store.validate_availability() | ||||||
|  |  | ||||||
|  |         if backup: | ||||||
|  |             await self.sys_backups.do_backup_partial( | ||||||
|  |                 name=f"addon_{addon.slug}_{addon.version}", | ||||||
|  |                 homeassistant=False, | ||||||
|  |                 addons=[addon.slug], | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         return await addon.update() | ||||||
|  |  | ||||||
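A caller-side sketch of the update-with-backup path, using a hypothetical slug; the partial backup covers just this add-on before the update runs:

    start_task = await self.sys_addons.update("local_example", backup=True)
    if start_task:
        await start_task  # resolves once the add-on reports started again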
|  |     @Job( | ||||||
|  |         name="addon_manager_rebuild", | ||||||
|  |         conditions=[ | ||||||
|  |             JobCondition.FREE_SPACE, | ||||||
|  |             JobCondition.INTERNET_HOST, | ||||||
|  |             JobCondition.HEALTHY, | ||||||
|  |         ], | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def rebuild(self, slug: str) -> asyncio.Task | None: | ||||||
|  |         """Perform a rebuild of local build add-on. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||||
|  |         if addon is started after rebuild. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug not in self.local: | ||||||
|  |             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||||
|  |         addon = self.local[slug] | ||||||
|  |  | ||||||
|  |         if addon.is_detached: | ||||||
|  |             raise AddonsError( | ||||||
|  |                 f"Add-on {slug} is not available inside store", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |         store = self.store[slug] | ||||||
|  |  | ||||||
|  |         # Check if a rebuild is possible now | ||||||
|  |         if addon.version != store.version: | ||||||
|  |             raise AddonsError( | ||||||
|  |                 "Version changed, use Update instead Rebuild", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |         if not addon.need_build: | ||||||
|  |             raise AddonsNotSupportedError( | ||||||
|  |                 "Can't rebuild a image based add-on", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         return await addon.rebuild() | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_restore", | ||||||
|  |         conditions=[ | ||||||
|  |             JobCondition.FREE_SPACE, | ||||||
|  |             JobCondition.INTERNET_HOST, | ||||||
|  |             JobCondition.HEALTHY, | ||||||
|  |         ], | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def restore( | ||||||
|  |         self, slug: str, tar_file: tarfile.TarFile | ||||||
|  |     ) -> asyncio.Task | None: | ||||||
|  |         """Restore state of an add-on. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||||
|  |         if addon is started after restore. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug not in self.local: | ||||||
|  |             _LOGGER.debug("Add-on %s is not local available for restore", slug) | ||||||
|  |             addon = Addon(self.coresys, slug) | ||||||
|  |             had_ingress = False | ||||||
|  |         else: | ||||||
|  |             _LOGGER.debug("Add-on %s is local available for restore", slug) | ||||||
|  |             addon = self.local[slug] | ||||||
|  |             had_ingress = addon.ingress_panel | ||||||
|  |  | ||||||
|  |         wait_for_start = await addon.restore(tar_file) | ||||||
|  |  | ||||||
|  |         # Check if new | ||||||
|  |         if slug not in self.local: | ||||||
|  |             _LOGGER.info("Detect new Add-on after restore %s", slug) | ||||||
|  |             self.local[slug] = addon | ||||||
|  |  | ||||||
|  |         # Update ingress | ||||||
|  |         if had_ingress != addon.ingress_panel: | ||||||
|  |             await self.sys_ingress.reload() | ||||||
|  |             with suppress(HomeAssistantAPIError): | ||||||
|  |                 await self.sys_ingress.update_hass_panel(addon) | ||||||
|  |  | ||||||
|  |         return wait_for_start | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_repair", | ||||||
|  |         conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST], | ||||||
|  |     ) | ||||||
|  |     async def repair(self) -> None: | ||||||
|  |         """Repair local add-ons.""" | ||||||
|  |         needs_repair: list[Addon] = [] | ||||||
|  |  | ||||||
|  |         # Evaluate Add-ons to repair | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if await addon.instance.exists(): | ||||||
|  |                 continue | ||||||
|  |             needs_repair.append(addon) | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Found %d add-ons to repair", len(needs_repair)) | ||||||
|  |         if not needs_repair: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         for addon in needs_repair: | ||||||
|  |             _LOGGER.info("Repairing for add-on: %s", addon.slug) | ||||||
|  |             with suppress(DockerError, KeyError): | ||||||
|  |                 # Need to pull the image again | ||||||
|  |                 if not addon.need_build: | ||||||
|  |                     await addon.instance.install(addon.version, addon.image) | ||||||
|  |                     continue | ||||||
|  |  | ||||||
|  |                 # Need local lookup | ||||||
|  |                 if addon.need_build and not addon.is_detached: | ||||||
|  |                     store = self.store[addon.slug] | ||||||
|  |                     # If this add-on is available for rebuild | ||||||
|  |                     if addon.version == store.version: | ||||||
|  |                         await addon.instance.install(addon.version, addon.image) | ||||||
|  |                         continue | ||||||
|  |  | ||||||
|  |             _LOGGER.error("Can't repair %s", addon.slug) | ||||||
|  |             with suppress(AddonsError): | ||||||
|  |                 await self.uninstall(addon.slug) | ||||||
|  |  | ||||||
|  |     async def sync_dns(self) -> None: | ||||||
|  |         """Sync add-ons DNS names.""" | ||||||
|  |         # Update hosts | ||||||
|  |         add_host_coros: list[Awaitable[None]] = [] | ||||||
|  |         for addon in self.installed: | ||||||
|  |             try: | ||||||
|  |                 if not await addon.instance.is_running(): | ||||||
|  |                     continue | ||||||
|  |             except DockerError as err: | ||||||
|  |                 _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err) | ||||||
|  |                 self.sys_resolution.create_issue( | ||||||
|  |                     IssueType.CORRUPT_DOCKER, | ||||||
|  |                     ContextType.ADDON, | ||||||
|  |                     reference=addon.slug, | ||||||
|  |                     suggestions=[SuggestionType.EXECUTE_REPAIR], | ||||||
|  |                 ) | ||||||
|  |                 capture_exception(err) | ||||||
|  |             else: | ||||||
|  |                 add_host_coros.append( | ||||||
|  |                     self.sys_plugins.dns.add_host( | ||||||
|  |                         ipv4=addon.ip_address, names=[addon.hostname], write=False | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |         await asyncio.gather(*add_host_coros) | ||||||
|  |  | ||||||
|  |         # Write hosts files | ||||||
|  |         with suppress(CoreDNSError): | ||||||
|  |             await self.sys_plugins.dns.write_hosts() | ||||||
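Note the batching above: every add_host() call passes write=False and the hosts file is flushed once at the end. The same pattern in isolation, with dns and records as stand-ins:

    await asyncio.gather(
        *(dns.add_host(ipv4=ip, names=[name], write=False) for ip, name in records)
    )
    await dns.write_hosts()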
| @@ -1,14 +1,17 @@ | |||||||
| """Init file for Supervisor add-ons.""" | """Init file for Supervisor add-ons.""" | ||||||
| from abc import ABC, abstractmethod | from abc import ABC, abstractmethod | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
| from collections.abc import Callable | from collections.abc import Awaitable, Callable | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
|  | from datetime import datetime | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from awesomeversion import AwesomeVersion, AwesomeVersionException | from awesomeversion import AwesomeVersion, AwesomeVersionException | ||||||
|  |  | ||||||
|  | from supervisor.utils.dt import utc_from_timestamp | ||||||
|  |  | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     ATTR_ADVANCED, |     ATTR_ADVANCED, | ||||||
|     ATTR_APPARMOR, |     ATTR_APPARMOR, | ||||||
| @@ -65,11 +68,13 @@ from ..const import ( | |||||||
|     ATTR_TIMEOUT, |     ATTR_TIMEOUT, | ||||||
|     ATTR_TMPFS, |     ATTR_TMPFS, | ||||||
|     ATTR_TRANSLATIONS, |     ATTR_TRANSLATIONS, | ||||||
|  |     ATTR_TYPE, | ||||||
|     ATTR_UART, |     ATTR_UART, | ||||||
|     ATTR_UDEV, |     ATTR_UDEV, | ||||||
|     ATTR_URL, |     ATTR_URL, | ||||||
|     ATTR_USB, |     ATTR_USB, | ||||||
|     ATTR_VERSION, |     ATTR_VERSION, | ||||||
|  |     ATTR_VERSION_TIMESTAMP, | ||||||
|     ATTR_VIDEO, |     ATTR_VIDEO, | ||||||
|     ATTR_WATCHDOG, |     ATTR_WATCHDOG, | ||||||
|     ATTR_WEBUI, |     ATTR_WEBUI, | ||||||
| @@ -86,9 +91,18 @@ from ..exceptions import AddonsNotSupportedError | |||||||
| from ..jobs.const import JOB_GROUP_ADDON | from ..jobs.const import JOB_GROUP_ADDON | ||||||
| from ..jobs.job_group import JobGroup | from ..jobs.job_group import JobGroup | ||||||
| from ..utils import version_is_new_enough | from ..utils import version_is_new_enough | ||||||
| from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode | from .configuration import FolderMapping | ||||||
|  | from .const import ( | ||||||
|  |     ATTR_BACKUP, | ||||||
|  |     ATTR_BREAKING_VERSIONS, | ||||||
|  |     ATTR_CODENOTARY, | ||||||
|  |     ATTR_PATH, | ||||||
|  |     ATTR_READ_ONLY, | ||||||
|  |     AddonBackupMode, | ||||||
|  |     MappingType, | ||||||
|  | ) | ||||||
| from .options import AddonOptions, UiOptions | from .options import AddonOptions, UiOptions | ||||||
| from .validate import RE_SERVICE, RE_VOLUME | from .validate import RE_SERVICE | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| @@ -104,6 +118,10 @@ class AddonModel(JobGroup, ABC): | |||||||
|             coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug |             coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug | ||||||
|         ) |         ) | ||||||
|         self.slug: str = slug |         self.slug: str = slug | ||||||
|  |         self._path_icon_exists: bool = False | ||||||
|  |         self._path_logo_exists: bool = False | ||||||
|  |         self._path_changelog_exists: bool = False | ||||||
|  |         self._path_documentation_exists: bool = False | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     @abstractmethod |     @abstractmethod | ||||||
| @@ -212,6 +230,11 @@ class AddonModel(JobGroup, ABC): | |||||||
|         """Return latest version of add-on.""" |         """Return latest version of add-on.""" | ||||||
|         return self.data[ATTR_VERSION] |         return self.data[ATTR_VERSION] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def latest_version_timestamp(self) -> datetime: | ||||||
|  |         """Return when latest version was first seen.""" | ||||||
|  |         return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP]) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def version(self) -> AwesomeVersion: |     def version(self) -> AwesomeVersion: | ||||||
|         """Return version of add-on.""" |         """Return version of add-on.""" | ||||||
| @@ -492,22 +515,22 @@ class AddonModel(JobGroup, ABC): | |||||||
|     @property |     @property | ||||||
|     def with_icon(self) -> bool: |     def with_icon(self) -> bool: | ||||||
|         """Return True if an icon exists.""" |         """Return True if an icon exists.""" | ||||||
|         return self.path_icon.exists() |         return self._path_icon_exists | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def with_logo(self) -> bool: |     def with_logo(self) -> bool: | ||||||
|         """Return True if a logo exists.""" |         """Return True if a logo exists.""" | ||||||
|         return self.path_logo.exists() |         return self._path_logo_exists | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def with_changelog(self) -> bool: |     def with_changelog(self) -> bool: | ||||||
|         """Return True if a changelog exists.""" |         """Return True if a changelog exists.""" | ||||||
|         return self.path_changelog.exists() |         return self._path_changelog_exists | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def with_documentation(self) -> bool: |     def with_documentation(self) -> bool: | ||||||
|         """Return True if a documentation exists.""" |         """Return True if a documentation exists.""" | ||||||
|         return self.path_documentation.exists() |         return self._path_documentation_exists | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def supported_arch(self) -> list[str]: |     def supported_arch(self) -> list[str]: | ||||||
| @@ -538,14 +561,13 @@ class AddonModel(JobGroup, ABC): | |||||||
|         return ATTR_IMAGE not in self.data |         return ATTR_IMAGE not in self.data | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def map_volumes(self) -> dict[str, bool]: |     def map_volumes(self) -> dict[MappingType, FolderMapping]: | ||||||
|         """Return a dict of {volume: read-only} from add-on.""" |         """Return a dict of {MappingType: FolderMapping} from add-on.""" | ||||||
|         volumes = {} |         volumes = {} | ||||||
|         for volume in self.data[ATTR_MAP]: |         for volume in self.data[ATTR_MAP]: | ||||||
|             result = RE_VOLUME.match(volume) |             volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping( | ||||||
|             if not result: |                 volume.get(ATTR_PATH), volume[ATTR_READ_ONLY] | ||||||
|                 continue |             ) | ||||||
|             volumes[result.group(1)] = result.group(2) != "rw" |  | ||||||
|  |  | ||||||
|         return volumes |         return volumes | ||||||
|  |  | ||||||
| @@ -612,6 +634,22 @@ class AddonModel(JobGroup, ABC): | |||||||
|         """Return Signer email address for CAS.""" |         """Return Signer email address for CAS.""" | ||||||
|         return self.data.get(ATTR_CODENOTARY) |         return self.data.get(ATTR_CODENOTARY) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def breaking_versions(self) -> list[AwesomeVersion]: | ||||||
|  |         """Return breaking versions of addon.""" | ||||||
|  |         return self.data[ATTR_BREAKING_VERSIONS] | ||||||
|  |  | ||||||
|  |     def refresh_path_cache(self) -> Awaitable[None]: | ||||||
|  |         """Refresh cache of existing paths.""" | ||||||
|  |  | ||||||
|  |         def check_paths(): | ||||||
|  |             self._path_icon_exists = self.path_icon.exists() | ||||||
|  |             self._path_logo_exists = self.path_logo.exists() | ||||||
|  |             self._path_changelog_exists = self.path_changelog.exists() | ||||||
|  |             self._path_documentation_exists = self.path_documentation.exists() | ||||||
|  |  | ||||||
|  |         return self.sys_run_in_executor(check_paths) | ||||||
|  |  | ||||||
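The four Path.exists() probes touch disk, so they are bundled into a single executor job and the caller simply awaits the returned future, as AddonManager.load() does earlier in this diff:

    tasks = [store.refresh_path_cache() for store in stores]  # stores: iterable of AddonStore
    await asyncio.gather(*tasks)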
|     def validate_availability(self) -> None: |     def validate_availability(self) -> None: | ||||||
|         """Validate if addon is available for current system.""" |         """Validate if addon is available for current system.""" | ||||||
|         return self._validate_availability(self.data, logger=_LOGGER.error) |         return self._validate_availability(self.data, logger=_LOGGER.error) | ||||||
|   | |||||||
| @@ -81,6 +81,7 @@ from ..const import ( | |||||||
|     ATTR_TIMEOUT, |     ATTR_TIMEOUT, | ||||||
|     ATTR_TMPFS, |     ATTR_TMPFS, | ||||||
|     ATTR_TRANSLATIONS, |     ATTR_TRANSLATIONS, | ||||||
|  |     ATTR_TYPE, | ||||||
|     ATTR_UART, |     ATTR_UART, | ||||||
|     ATTR_UDEV, |     ATTR_UDEV, | ||||||
|     ATTR_URL, |     ATTR_URL, | ||||||
| @@ -91,9 +92,6 @@ from ..const import ( | |||||||
|     ATTR_VIDEO, |     ATTR_VIDEO, | ||||||
|     ATTR_WATCHDOG, |     ATTR_WATCHDOG, | ||||||
|     ATTR_WEBUI, |     ATTR_WEBUI, | ||||||
|     MAP_ADDON_CONFIG, |  | ||||||
|     MAP_CONFIG, |  | ||||||
|     MAP_HOMEASSISTANT_CONFIG, |  | ||||||
|     ROLE_ALL, |     ROLE_ALL, | ||||||
|     ROLE_DEFAULT, |     ROLE_DEFAULT, | ||||||
|     AddonBoot, |     AddonBoot, | ||||||
| @@ -101,7 +99,6 @@ from ..const import ( | |||||||
|     AddonStartup, |     AddonStartup, | ||||||
|     AddonState, |     AddonState, | ||||||
| ) | ) | ||||||
| from ..discovery.validate import valid_discovery_service |  | ||||||
| from ..docker.const import Capabilities | from ..docker.const import Capabilities | ||||||
| from ..validate import ( | from ..validate import ( | ||||||
|     docker_image, |     docker_image, | ||||||
| @@ -112,13 +109,22 @@ from ..validate import ( | |||||||
|     uuid_match, |     uuid_match, | ||||||
|     version_tag, |     version_tag, | ||||||
| ) | ) | ||||||
| from .const import ATTR_BACKUP, ATTR_CODENOTARY, RE_SLUG, AddonBackupMode | from .const import ( | ||||||
|  |     ATTR_BACKUP, | ||||||
|  |     ATTR_BREAKING_VERSIONS, | ||||||
|  |     ATTR_CODENOTARY, | ||||||
|  |     ATTR_PATH, | ||||||
|  |     ATTR_READ_ONLY, | ||||||
|  |     RE_SLUG, | ||||||
|  |     AddonBackupMode, | ||||||
|  |     MappingType, | ||||||
|  | ) | ||||||
| from .options import RE_SCHEMA_ELEMENT | from .options import RE_SCHEMA_ELEMENT | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| RE_VOLUME = re.compile( | RE_VOLUME = re.compile( | ||||||
|     r"^(config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$" |     r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$" | ||||||
| ) | ) | ||||||
| RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$") | RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$") | ||||||
|  |  | ||||||
| @@ -148,6 +154,7 @@ RE_MACHINE = re.compile( | |||||||
|     r"|raspberrypi3" |     r"|raspberrypi3" | ||||||
|     r"|raspberrypi4-64" |     r"|raspberrypi4-64" | ||||||
|     r"|raspberrypi4" |     r"|raspberrypi4" | ||||||
|  |     r"|raspberrypi5-64" | ||||||
|     r"|yellow" |     r"|yellow" | ||||||
|     r"|green" |     r"|green" | ||||||
|     r"|tinker" |     r"|tinker" | ||||||
| @@ -182,20 +189,6 @@ def _warn_addon_config(config: dict[str, Any]): | |||||||
|             name, |             name, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     invalid_services: list[str] = [] |  | ||||||
|     for service in config.get(ATTR_DISCOVERY, []): |  | ||||||
|         try: |  | ||||||
|             valid_discovery_service(service) |  | ||||||
|         except vol.Invalid: |  | ||||||
|             invalid_services.append(service) |  | ||||||
|  |  | ||||||
|     if invalid_services: |  | ||||||
|         _LOGGER.warning( |  | ||||||
|             "Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s", |  | ||||||
|             ", ".join(invalid_services), |  | ||||||
|             name, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     return config |     return config | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -265,26 +258,45 @@ def _migrate_addon_config(protocol=False): | |||||||
|                     name, |                     name, | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|  |         # 2023-11 "map" entries can also be dict to allow path configuration | ||||||
|  |         volumes = [] | ||||||
|  |         for entry in config.get(ATTR_MAP, []): | ||||||
|  |             if isinstance(entry, dict): | ||||||
|  |                 volumes.append(entry) | ||||||
|  |             if isinstance(entry, str): | ||||||
|  |                 result = RE_VOLUME.match(entry) | ||||||
|  |                 if not result: | ||||||
|  |                     continue | ||||||
|  |                 volumes.append( | ||||||
|  |                     { | ||||||
|  |                         ATTR_TYPE: result.group(1), | ||||||
|  |                         ATTR_READ_ONLY: result.group(2) != "rw", | ||||||
|  |                     } | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |         if volumes: | ||||||
|  |             config[ATTR_MAP] = volumes | ||||||
|  |  | ||||||
|         # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config |         # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config | ||||||
|         volumes = [RE_VOLUME.match(entry) for entry in config.get(ATTR_MAP, [])] |         if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes): | ||||||
|         if any(volume and volume.group(1) == MAP_CONFIG for volume in volumes): |  | ||||||
|             if any( |             if any( | ||||||
|                 volume |                 volume | ||||||
|                 and volume.group(1) in {MAP_ADDON_CONFIG, MAP_HOMEASSISTANT_CONFIG} |                 and volume[ATTR_TYPE] | ||||||
|  |                 in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG} | ||||||
|                 for volume in volumes |                 for volume in volumes | ||||||
|             ): |             ): | ||||||
|                 _LOGGER.warning( |                 _LOGGER.warning( | ||||||
|                     "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s", |                     "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s", | ||||||
|                     MAP_ADDON_CONFIG, |                     MappingType.ADDON_CONFIG, | ||||||
|                     MAP_HOMEASSISTANT_CONFIG, |                     MappingType.HOMEASSISTANT_CONFIG, | ||||||
|                     MAP_CONFIG, |                     MappingType.CONFIG, | ||||||
|                     name, |                     name, | ||||||
|                 ) |                 ) | ||||||
|             else: |             else: | ||||||
|                 _LOGGER.debug( |                 _LOGGER.debug( | ||||||
|                     "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s", |                     "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s", | ||||||
|                     MAP_CONFIG, |                     MappingType.CONFIG, | ||||||
|                     MAP_HOMEASSISTANT_CONFIG, |                     MappingType.HOMEASSISTANT_CONFIG, | ||||||
|                     name, |                     name, | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
| @@ -336,7 +348,15 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( | |||||||
|         vol.Optional(ATTR_DEVICES): [str], |         vol.Optional(ATTR_DEVICES): [str], | ||||||
|         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), |         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), | ||||||
|         vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(), |         vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(), | ||||||
|         vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)], |         vol.Optional(ATTR_MAP, default=list): [ | ||||||
|  |             vol.Schema( | ||||||
|  |                 { | ||||||
|  |                     vol.Required(ATTR_TYPE): vol.Coerce(MappingType), | ||||||
|  |                     vol.Optional(ATTR_READ_ONLY, default=True): bool, | ||||||
|  |                     vol.Optional(ATTR_PATH): str, | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         ], | ||||||
|         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str}, |         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str}, | ||||||
|         vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)], |         vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)], | ||||||
|         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), |         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), | ||||||
| @@ -388,6 +408,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( | |||||||
|             vol.Coerce(int), vol.Range(min=10, max=300) |             vol.Coerce(int), vol.Range(min=10, max=300) | ||||||
|         ), |         ), | ||||||
|         vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(), |         vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag], | ||||||
|     }, |     }, | ||||||
|     extra=vol.REMOVE_EXTRA, |     extra=vol.REMOVE_EXTRA, | ||||||
| ) | ) | ||||||
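
For orientation, the net effect of the hunks above: `map` entries may now be dicts with type, read-only, and path keys; legacy `name:rw` strings are migrated into that dict form; and the type is coerced through the new `MappingType` enum. A minimal standalone sketch of the dict form, hedged: the literal keys and enum values here are inlined assumptions for illustration, not imports from Supervisor.

from enum import StrEnum

import voluptuous as vol


class MappingType(StrEnum):
    """Illustrative subset of the mapping types named in RE_VOLUME above."""

    DATA = "data"
    CONFIG = "config"
    ADDON_CONFIG = "addon_config"
    HOMEASSISTANT_CONFIG = "homeassistant_config"


# Mirrors the new list-of-dicts shape of the "map" option
MAP_SCHEMA = vol.Schema(
    [
        {
            vol.Required("type"): vol.Coerce(MappingType),
            vol.Optional("read_only", default=True): bool,
            vol.Optional("path"): str,
        }
    ]
)

# A migrated legacy entry "addon_config:rw" plus an explicit target path
print(MAP_SCHEMA([{"type": "addon_config", "read_only": False, "path": "/custom"}]))
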

@@ -9,12 +9,14 @@ from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispa
 
 from ..const import AddonState
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import APIAddonNotInstalled
+from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
+from ..utils.sentry import capture_exception
 from .addons import APIAddons
 from .audio import APIAudio
 from .auth import APIAuth
 from .backups import APIBackups
 from .cli import APICli
+from .const import CONTENT_TYPE_TEXT
 from .discovery import APIDiscovery
 from .dns import APICoreDNS
 from .docker import APIDocker
@@ -36,7 +38,7 @@ from .security import APISecurity
 from .services import APIServices
 from .store import APIStore
 from .supervisor import APISupervisor
-from .utils import api_process
+from .utils import api_process, api_process_raw
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -71,8 +73,14 @@ class RestAPI(CoreSysAttributes):
         self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
         self._site: web.TCPSite | None = None
 
+        # share single host API handler for reuse in logging endpoints
+        self._api_host: APIHost | None = None
+
     async def load(self) -> None:
         """Register REST API Calls."""
+        self._api_host = APIHost()
+        self._api_host.coresys = self.coresys
+
         self._register_addons()
         self._register_audio()
         self._register_auth()
@@ -102,10 +110,41 @@ class RestAPI(CoreSysAttributes):
 
         await self.start()
 
+    def _register_advanced_logs(self, path: str, syslog_identifier: str):
+        """Register logs endpoint for a given path, returning logs for single syslog identifier."""
+
+        self.webapp.add_routes(
+            [
+                web.get(
+                    f"{path}/logs",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+            ]
+        )
+
     def _register_host(self) -> None:
         """Register hostcontrol functions."""
-        api_host = APIHost()
-        api_host.coresys = self.coresys
+        api_host = self._api_host
 
         self.webapp.add_routes(
             [
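
Once a component calls the helper above (for example `_register_advanced_logs("/core", "homeassistant")` further down), four journal-backed routes exist under that prefix: `/logs`, `/logs/follow`, `/logs/boots/{bootid}`, and `/logs/boots/{bootid}/follow`. A hedged client-side sketch of consuming the streaming variant; the base URL and token header are assumptions of the example, not part of this diff:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumed in-container base URL
HEADERS = {"Authorization": "Bearer <supervisor-token>"}  # placeholder token


async def follow_core_logs() -> None:
    """Stream Home Assistant Core journal logs line by line."""
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        async with session.get(f"{SUPERVISOR}/core/logs/follow") as resp:
            async for line in resp.content:
                print(line.decode(errors="replace").rstrip())


asyncio.run(follow_core_logs())
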
@@ -182,6 +221,8 @@ class RestAPI(CoreSysAttributes):
                 web.post("/os/config/sync", api_os.config_sync),
                 web.post("/os/datadisk/move", api_os.migrate_data),
                 web.get("/os/datadisk/list", api_os.list_data),
+                web.post("/os/datadisk/wipe", api_os.wipe_data),
+                web.post("/os/boot-slot", api_os.set_boot_slot),
             ]
         )
 
@@ -219,6 +260,8 @@ class RestAPI(CoreSysAttributes):
                 web.get("/jobs/info", api_jobs.info),
                 web.post("/jobs/options", api_jobs.options),
                 web.post("/jobs/reset", api_jobs.reset),
+                web.get("/jobs/{uuid}", api_jobs.job_info),
+                web.delete("/jobs/{uuid}", api_jobs.remove_job),
             ]
         )
 
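The two new per-job routes make the IDs returned by background operations actionable. A polling sketch, hedged: the `{"result": ..., "data": ...}` envelope is the Supervisor's usual api_process wrapping, but the exact field reporting completion is an assumption here:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumed base URL
HEADERS = {"Authorization": "Bearer <supervisor-token>"}  # placeholder token


async def wait_for_job(job_id: str) -> dict:
    """Poll GET /jobs/{uuid} until done, then DELETE the finished job record."""
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        while True:
            async with session.get(f"{SUPERVISOR}/jobs/{job_id}") as resp:
                job = (await resp.json())["data"]
            if job.get("done"):  # completion field name assumed for the sketch
                await session.delete(f"{SUPERVISOR}/jobs/{job_id}")
                return job
            await asyncio.sleep(1)
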
@@ -257,11 +300,11 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/multicast/info", api_multicast.info),
                 web.get("/multicast/stats", api_multicast.stats),
-                web.get("/multicast/logs", api_multicast.logs),
                 web.post("/multicast/update", api_multicast.update),
                 web.post("/multicast/restart", api_multicast.restart),
             ]
         )
+        self._register_advanced_logs("/multicast", "hassio_multicast")
 
     def _register_hardware(self) -> None:
         """Register hardware functions."""
@@ -334,6 +377,7 @@ class RestAPI(CoreSysAttributes):
                 web.post("/auth", api_auth.auth),
                 web.post("/auth/reset", api_auth.reset),
                 web.delete("/auth/cache", api_auth.cache),
+                web.get("/auth/list", api_auth.list_users),
             ]
         )
 
@@ -347,7 +391,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/supervisor/ping", api_supervisor.ping),
                 web.get("/supervisor/info", api_supervisor.info),
                 web.get("/supervisor/stats", api_supervisor.stats),
-                web.get("/supervisor/logs", api_supervisor.logs),
                 web.post("/supervisor/update", api_supervisor.update),
                 web.post("/supervisor/reload", api_supervisor.reload),
                 web.post("/supervisor/restart", api_supervisor.restart),
@@ -356,6 +399,38 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
+        async def get_supervisor_logs(*args, **kwargs):
+            try:
+                return await self._api_host.advanced_logs_handler(
+                    *args, identifier="hassio_supervisor", **kwargs
+                )
+            except Exception as err:  # pylint: disable=broad-exception-caught
+                # Supervisor logs are critical, so catch everything, log the exception
+                # and try to return Docker container logs as the fallback
+                _LOGGER.exception(
+                    "Failed to get supervisor logs using advanced_logs API"
+                )
+                if not isinstance(err, HostNotSupportedError):
+                    # No need to capture HostNotSupportedError to Sentry, the cause
+                    # is known and reported to the user using the resolution center.
+                    capture_exception(err)
+                return await api_supervisor.logs(*args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/supervisor/logs", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/boots/{bootid}/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+            ]
+        )
+
     def _register_homeassistant(self) -> None:
         """Register Home Assistant functions."""
         api_hass = APIHomeAssistant()
@@ -364,7 +439,6 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/core/info", api_hass.info),
-                web.get("/core/logs", api_hass.logs),
                 web.get("/core/stats", api_hass.stats),
                 web.post("/core/options", api_hass.options),
                 web.post("/core/update", api_hass.update),
@@ -376,11 +450,12 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
+        self._register_advanced_logs("/core", "homeassistant")
+
         # Reroute from legacy
         self.webapp.add_routes(
             [
                 web.get("/homeassistant/info", api_hass.info),
-                web.get("/homeassistant/logs", api_hass.logs),
                 web.get("/homeassistant/stats", api_hass.stats),
                 web.post("/homeassistant/options", api_hass.options),
                 web.post("/homeassistant/restart", api_hass.restart),
@@ -392,6 +467,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
+        self._register_advanced_logs("/homeassistant", "homeassistant")
+
     def _register_proxy(self) -> None:
         """Register Home Assistant API Proxy."""
         api_proxy = APIProxy()
@@ -438,13 +515,33 @@ class RestAPI(CoreSysAttributes):
                 ),
                 web.get("/addons/{addon}/options/config", api_addons.options_config),
                 web.post("/addons/{addon}/rebuild", api_addons.rebuild),
-                web.get("/addons/{addon}/logs", api_addons.logs),
                 web.post("/addons/{addon}/stdin", api_addons.stdin),
                 web.post("/addons/{addon}/security", api_addons.security),
                 web.get("/addons/{addon}/stats", api_addons.stats),
             ]
         )
 
+        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+        async def get_addon_logs(request, *args, **kwargs):
+            addon = api_addons.get_addon_for_request(request)
+            kwargs["identifier"] = f"addon_{addon.slug}"
+            return await self._api_host.advanced_logs(request, *args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/addons/{addon}/logs", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/boots/{bootid}/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+            ]
+        )
+
         # Legacy routing to support requests for not installed addons
         api_store = APIStore()
         api_store.coresys = self.coresys
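
Since `get_addon_logs` is wrapped with `api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)`, both the journal output and any error body arrive as text/plain, so a client can treat the response uniformly. A hedged fetch sketch; the slug, base URL, and token are example values:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumed base URL
HEADERS = {"Authorization": "Bearer <supervisor-token>"}  # placeholder token


async def dump_addon_logs(slug: str = "core_mosquitto") -> None:
    """Print journal logs for one add-on; errors also come back as plain text."""
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        async with session.get(f"{SUPERVISOR}/addons/{slug}/logs") as resp:
            print(await resp.text())


asyncio.run(dump_addon_logs())
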
@@ -542,7 +639,6 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/dns/info", api_dns.info),
                 web.get("/dns/stats", api_dns.stats),
-                web.get("/dns/logs", api_dns.logs),
                 web.post("/dns/update", api_dns.update),
                 web.post("/dns/options", api_dns.options),
                 web.post("/dns/restart", api_dns.restart),
@@ -550,18 +646,17 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
+        self._register_advanced_logs("/dns", "hassio_dns")
+
     def _register_audio(self) -> None:
         """Register Audio functions."""
         api_audio = APIAudio()
         api_audio.coresys = self.coresys
-        api_host = APIHost()
-        api_host.coresys = self.coresys
 
         self.webapp.add_routes(
             [
                 web.get("/audio/info", api_audio.info),
                 web.get("/audio/stats", api_audio.stats),
-                web.get("/audio/logs", api_audio.logs),
                 web.post("/audio/update", api_audio.update),
                 web.post("/audio/restart", api_audio.restart),
                 web.post("/audio/reload", api_audio.reload),
@@ -574,6 +669,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
+        self._register_advanced_logs("/audio", "hassio_audio")
+
     def _register_mounts(self) -> None:
         """Register mounts endpoints."""
         api_mounts = APIMounts()
@@ -600,7 +697,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/store", api_store.store_info),
                 web.get("/store/addons", api_store.addons_list),
                 web.get("/store/addons/{addon}", api_store.addons_addon_info),
-                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
                 web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
                 web.get(
@@ -622,6 +718,8 @@ class RestAPI(CoreSysAttributes):
                     "/store/addons/{addon}/update/{version}",
                     api_store.addons_addon_update,
                 ),
+                # Must be below others since it has a wildcard in resource path
+                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.post("/store/reload", api_store.reload),
                 web.get("/store/repositories", api_store.repositories_list),
                 web.get(

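The store reordering above matters because aiohttp matches plain resources in registration order, so `/store/addons/{addon}/{version}` registered early would swallow fixed segments like `.../icon` (the `{version}` wildcard happily matches "icon"). A standalone sketch of the pitfall, with hypothetical handlers rather than Supervisor code:

from aiohttp import web


async def addon_icon(request: web.Request) -> web.Response:
    return web.Response(text="icon")


async def addon_info(request: web.Request) -> web.Response:
    return web.Response(text=f"info for version {request.match_info['version']}")


app = web.Application()
# Fixed-segment route first; the {version} wildcard must come last, otherwise
# GET /store/addons/x/icon would resolve with {version} == "icon".
app.add_routes(
    [
        web.get("/store/addons/{addon}/icon", addon_icon),
        web.get("/store/addons/{addon}/{version}", addon_info),
    ]
)

if __name__ == "__main__":
    web.run_app(app)
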
@@ -8,8 +8,8 @@ from aiohttp import web
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
-from ..addons import AnyAddon
 from ..addons.addon import Addon
+from ..addons.manager import AnyAddon
 from ..addons.utils import rating_security
 from ..const import (
     ATTR_ADDONS,
@@ -106,8 +106,8 @@ from ..exceptions import (
     PwnedSecret,
 )
 from ..validate import docker_ports
-from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate, json_loads
+from .const import ATTR_REMOVE_CONFIG, ATTR_SIGNED
+from .utils import api_process, api_validate, json_loads
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -126,15 +126,19 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )
 
-# pylint: disable=no-value-for-parameter
 SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
 
+SCHEMA_UNINSTALL = vol.Schema(
+    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
+)
+# pylint: enable=no-value-for-parameter
+
 
 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""
 
-    def _extract_addon(self, request: web.Request) -> Addon:
-        """Return addon, throw an exception it it doesn't exist."""
+    def get_addon_for_request(self, request: web.Request) -> Addon:
+        """Return addon, throw an exception if it doesn't exist."""
         addon_slug: str = request.match_info.get("addon")
 
         # Lookup itself
@@ -187,7 +191,7 @@ class APIAddons(CoreSysAttributes):
 
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return add-on information."""
-        addon: AnyAddon = self._extract_addon(request)
+        addon: AnyAddon = self.get_addon_for_request(request)
 
         data = {
             ATTR_NAME: addon.name,
@@ -268,7 +272,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
 
         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()
@@ -303,7 +307,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options_validate(self, request: web.Request) -> None:
         """Validate user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
 
         options = await request.json(loads=json_loads) or addon.options
@@ -345,7 +349,7 @@ class APIAddons(CoreSysAttributes):
         slug: str = request.match_info.get("addon")
         if slug != "self":
             raise APIForbidden("This can be only read by the Add-on itself!")
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
 
         # Lookup/reload secrets
         await self.sys_homeassistant.secrets.reload()
@@ -357,7 +361,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
 
         if ATTR_PROTECTED in body:
@@ -369,7 +373,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
 
         stats: DockerStats = await addon.stats()
 
@@ -385,48 +389,47 @@ class APIAddons(CoreSysAttributes):
         }
 
     @api_process
-    def uninstall(self, request: web.Request) -> Awaitable[None]:
+    async def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self._extract_addon(request)
-        return asyncio.shield(self.sys_addons.uninstall(addon.slug))
+        addon = self.get_addon_for_request(request)
+        body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
+        return await asyncio.shield(
+            self.sys_addons.uninstall(
+                addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
+            )
+        )
 
     @api_process
     async def start(self, request: web.Request) -> None:
         """Start add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.start()):
             await start_task
 
     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         return asyncio.shield(addon.stop())
 
     @api_process
     async def restart(self, request: web.Request) -> None:
         """Restart add-on."""
-        addon: Addon = self._extract_addon(request)
+        addon: Addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.restart()):
             await start_task
 
     @api_process
     async def rebuild(self, request: web.Request) -> None:
         """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
             await start_task
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return logs from add-on."""
-        addon = self._extract_addon(request)
-        return addon.logs()
-
     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if not addon.with_stdin:
             raise APIError(f"STDIN not supported the {addon.slug} add-on")
 
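A hedged request sketch for the reworked uninstall. The uninstall route itself (POST /addons/{addon}/uninstall in the Supervisor API) is outside this diff, and the slug, base URL, and token are example values:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumed base URL
HEADERS = {"Authorization": "Bearer <supervisor-token>"}  # placeholder token


async def uninstall_addon(slug: str = "local_example") -> None:
    """Uninstall an add-on; remove_config defaults to False, keeping its config dir."""
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        await session.post(
            f"{SUPERVISOR}/addons/{slug}/uninstall",
            json={"remove_config": True},
        )


asyncio.run(uninstall_addon())
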

@@ -35,8 +35,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..host.sound import StreamType
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -111,11 +110,6 @@ class APIAudio(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.audio.update(version))
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Audio Docker logs."""
-        return self.sys_plugins.audio.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Audio plugin."""

@@ -1,6 +1,7 @@
 """Init file for Supervisor auth/SSO RESTful API."""
 import asyncio
 import logging
+from typing import Any
 
 from aiohttp import BasicAuth, web
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -8,10 +9,19 @@ from aiohttp.web_exceptions import HTTPUnauthorized
 import voluptuous as vol
 
 from ..addons.addon import Addon
-from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden
-from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
+from ..utils.json import json_loads
+from .const import (
+    ATTR_GROUP_IDS,
+    ATTR_IS_ACTIVE,
+    ATTR_IS_OWNER,
+    ATTR_LOCAL_ONLY,
+    ATTR_USERS,
+    CONTENT_TYPE_JSON,
+    CONTENT_TYPE_URL,
+)
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,7 +77,7 @@ class APIAuth(CoreSysAttributes):
 
         # Json
         if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
-            data = await request.json()
+            data = await request.json(loads=json_loads)
             return await self._process_dict(request, addon, data)
 
         # URL encoded
@@ -89,3 +99,21 @@ class APIAuth(CoreSysAttributes):
     async def cache(self, request: web.Request) -> None:
         """Process cache reset request."""
         self.sys_auth.reset_data()
+
+    @api_process
+    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
+        """List users on the Home Assistant instance."""
+        return {
+            ATTR_USERS: [
+                {
+                    ATTR_USERNAME: user[ATTR_USERNAME],
+                    ATTR_NAME: user[ATTR_NAME],
+                    ATTR_IS_OWNER: user[ATTR_IS_OWNER],
+                    ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
+                    ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
+                    ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
+                }
+                for user in await self.sys_auth.list_users()
+                if user[ATTR_USERNAME]
+            ]
+        }
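
Combined with the `/auth/list` route registered earlier, the handler yields a payload shaped like the sketch below. The user values are invented for illustration; the result/data envelope is api_process's usual wrapping:

# Illustrative response body for GET /auth/list; all values are made up.
EXAMPLE_RESPONSE = {
    "result": "ok",
    "data": {
        "users": [
            {
                "username": "anna",
                "name": "Anna",
                "is_owner": True,
                "is_active": True,
                "local_only": False,
                "group_ids": ["system-admin"],
            }
        ]
    },
}
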

@@ -1,5 +1,7 @@
 """Backups RESTful API."""
 import asyncio
+from collections.abc import Callable
+import errno
 import logging
 from pathlib import Path
 import re
@@ -10,6 +12,7 @@ from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol
 
+from ..backups.backup import Backup
 from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
 from ..const import (
     ATTR_ADDONS,
@@ -32,11 +35,15 @@ from ..const import (
     ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
+    BusEvent,
+    CoreState,
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
+from ..jobs import JobSchedulerOptions
 from ..mounts.const import MountUsage
-from .const import CONTENT_TYPE_TAR
+from ..resolution.const import UnhealthyReason
+from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -48,17 +55,21 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
 
 # pylint: disable=no-value-for-parameter
-SCHEMA_RESTORE_PARTIAL = vol.Schema(
+SCHEMA_RESTORE_FULL = vol.Schema(
     {
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
+        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
+    }
+)
+
+SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
+    {
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
         vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
         vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
     }
 )
 
-SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
-
 SCHEMA_BACKUP_FULL = vol.Schema(
     {
         vol.Optional(ATTR_NAME): str,
@@ -66,6 +77,7 @@ SCHEMA_BACKUP_FULL = vol.Schema(
         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
         vol.Optional(ATTR_LOCATON): vol.Maybe(str),
         vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
+        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
     }
 )
 
@@ -202,46 +214,109 @@ class APIBackups(CoreSysAttributes):
 
         return body
 
+    async def _background_backup_task(
+        self, backup_method: Callable, *args, **kwargs
+    ) -> tuple[asyncio.Task, str]:
+        """Start backup task in background and return task and job ID."""
+        event = asyncio.Event()
+        job, backup_task = self.sys_jobs.schedule_job(
+            backup_method, JobSchedulerOptions(), *args, **kwargs
+        )
+
+        async def release_on_freeze(new_state: CoreState):
+            if new_state == CoreState.FREEZE:
+                event.set()
+
+        # Wait for system to get into freeze state before returning
+        # If the backup fails validation it will raise before getting there
+        listener = self.sys_bus.register_event(
+            BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
+        )
+        try:
+            await asyncio.wait(
+                (
+                    backup_task,
+                    self.sys_create_task(event.wait()),
+                ),
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            return (backup_task, job.uuid)
+        finally:
+            self.sys_bus.remove_listener(listener)
+
     @api_process
     async def backup_full(self, request):
         """Create full backup."""
         body = await api_validate(SCHEMA_BACKUP_FULL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_full(**self._location_to_mount(body))
-        )
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_full, **self._location_to_mount(body)
+        )
 
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
     async def backup_partial(self, request):
         """Create a partial backup."""
         body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_partial(**self._location_to_mount(body))
-        )
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_partial, **self._location_to_mount(body)
+        )
 
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
     async def restore_full(self, request):
         """Full restore of a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_full, backup, **body
+        )
 
-        return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
     async def restore_partial(self, request):
         """Partial restore a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_partial, backup, **body
+        )
 
-        return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
     async def freeze(self, request):
@@ -288,6 +363,8 @@ class APIBackups(CoreSysAttributes):
                         backup.write(chunk)
 
             except OSError as err:
+                if err.errno == errno.EBADMSG:
+                    self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
                 _LOGGER.error("Can't write new backup file: %s", err)
                 return False
 
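Putting the backup changes together: with "background": true the handler returns as soon as the freeze state is reached, and the client tracks the rest through the jobs API (the wait_for_job sketch earlier fits here). A hedged end-to-end sketch; the body fields follow SCHEMA_BACKUP_FULL above, while the full-backup route (/backups/new/full in the Supervisor API), base URL, and token are assumptions of the example:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumed base URL
HEADERS = {"Authorization": "Bearer <supervisor-token>"}  # placeholder token


async def background_full_backup() -> str:
    """Start a full backup without waiting for it to finish; return the job ID."""
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        async with session.post(
            f"{SUPERVISOR}/backups/new/full",
            json={"name": "nightly", "background": True},
        ) as resp:
            data = (await resp.json())["data"]
    # Only job_id is guaranteed here; the slug appears once the backup completes.
    return data["job_id"]


print(asyncio.run(background_full_backup()))
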

@@ -1,11 +1,14 @@
 """Const for API."""
 
+from enum import StrEnum
+
 CONTENT_TYPE_BINARY = "application/octet-stream"
 CONTENT_TYPE_JSON = "application/json"
 CONTENT_TYPE_PNG = "image/png"
 CONTENT_TYPE_TAR = "application/tar"
 CONTENT_TYPE_TEXT = "text/plain"
 CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
+CONTENT_TYPE_X_LOG = "text/x-log"
 
 COOKIE_INGRESS = "ingress_session"
 
@@ -13,6 +16,9 @@ ATTR_AGENT_VERSION = "agent_version"
 ATTR_APPARMOR_VERSION = "apparmor_version"
 ATTR_ATTRIBUTES = "attributes"
 ATTR_AVAILABLE_UPDATES = "available_updates"
+ATTR_BACKGROUND = "background"
+ATTR_BOOT_SLOT = "boot_slot"
+ATTR_BOOT_SLOTS = "boot_slots"
 ATTR_BOOT_TIMESTAMP = "boot_timestamp"
 ATTR_BOOTS = "boots"
 ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -30,25 +36,42 @@ ATTR_DT_UTC = "dt_utc"
 ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
 ATTR_FILESYSTEMS = "filesystems"
+ATTR_GROUP_IDS = "group_ids"
 ATTR_IDENTIFIERS = "identifiers"
+ATTR_IS_ACTIVE = "is_active"
+ATTR_IS_OWNER = "is_owner"
+ATTR_JOB_ID = "job_id"
 ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
+ATTR_LOCAL_ONLY = "local_only"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
 ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
 ATTR_REMOVABLE = "removable"
+ATTR_REMOVE_CONFIG = "remove_config"
 ATTR_REVISION = "revision"
+ATTR_SAFE_MODE = "safe_mode"
 ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
+ATTR_STATUS = "status"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
 ATTR_SYSTEM_HEALTH_LED = "system_health_led"
 ATTR_TIME_DETECTED = "time_detected"
 ATTR_UPDATE_TYPE = "update_type"
-ATTR_USE_NTP = "use_ntp"
 ATTR_USAGE = "usage"
+ATTR_USE_NTP = "use_ntp"
+ATTR_USERS = "users"
 ATTR_VENDOR = "vendor"
+ATTR_VIRTUALIZATION = "virtualization"
+
+
+class BootSlot(StrEnum):
+    """Boot slots used by HAOS."""
+
+    A = "A"
+    B = "B"
|   | |||||||
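The BootSlot values feed straight into voluptuous coercion (see SCHEMA_SET_BOOT_SLOT further down). A minimal standalone sketch of that behavior, assuming Python 3.11+ for enum.StrEnum and voluptuous installed:

    from enum import StrEnum

    import voluptuous as vol


    class BootSlot(StrEnum):
        """Boot slots used by HAOS."""

        A = "A"
        B = "B"


    # vol.Coerce(BootSlot) calls BootSlot("A"), turning raw API strings into members
    schema = vol.Schema({vol.Required("boot_slot"): vol.Coerce(BootSlot)})
    print(schema({"boot_slot": "A"}))  # {'boot_slot': <BootSlot.A: 'A'>}
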
@@ -15,7 +15,6 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..discovery.validate import valid_discovery_service
 from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant
 
@@ -24,7 +23,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_DISCOVERY = vol.Schema(
     {
         vol.Required(ATTR_SERVICE): str,
-        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
+        vol.Required(ATTR_CONFIG): dict,
     }
 )
 
@@ -71,15 +70,6 @@ class APIDiscovery(CoreSysAttributes):
         addon: Addon = request[REQUEST_FROM]
         service = body[ATTR_SERVICE]
 
-        try:
-            valid_discovery_service(service)
-        except vol.Invalid:
-            _LOGGER.warning(
-                "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
-                service,
-                addon.name,
-            )
-
         # Access?
         if body[ATTR_SERVICE] not in addon.discovery:
             _LOGGER.error(
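With ATTR_CONFIG switched from vol.Optional/vol.Maybe to vol.Required, and the unknown-service warning removed, a discovery payload without a config dict now fails validation instead of merely being logged. A standalone sketch with the constant values inlined:

    import voluptuous as vol

    SCHEMA_DISCOVERY = vol.Schema(
        {
            vol.Required("service"): str,
            vol.Required("config"): dict,
        }
    )

    SCHEMA_DISCOVERY({"service": "mqtt", "config": {"host": "core-mosquitto"}})  # passes
    try:
        SCHEMA_DISCOVERY({"service": "mqtt"})  # config is now mandatory
    except vol.Invalid as err:
        print(err)  # required key not provided @ data['config']
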
@@ -26,8 +26,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
+from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -105,11 +105,6 @@ class APICoreDNS(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.dns.update(version))
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return DNS Docker logs."""
-        return self.sys_plugins.dns.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart CoreDNS plugin."""
@@ -16,7 +16,7 @@ from ..const import (
     ATTR_SYSTEM,
 )
 from ..coresys import CoreSysAttributes
-from ..dbus.udisks2 import UDisks2
+from ..dbus.udisks2 import UDisks2Manager
 from ..dbus.udisks2.block import UDisks2Block
 from ..dbus.udisks2.drive import UDisks2Drive
 from ..hardware.data import Device
@@ -72,7 +72,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
     }
 
 
-def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
+def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
     """Return a dict with information of a disk to be used in the API."""
     return {
         ATTR_VENDOR: drive.vendor,
@@ -36,8 +36,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import docker_image, network_port, version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_SAFE_MODE
+from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -63,6 +63,12 @@ SCHEMA_UPDATE = vol.Schema(
     }
 )
 
+SCHEMA_RESTART = vol.Schema(
+    {
+        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
+    }
+)
+
 
 class APIHomeAssistant(CoreSysAttributes):
     """Handle RESTful API for Home Assistant functions."""
@@ -94,6 +100,9 @@ class APIHomeAssistant(CoreSysAttributes):
 
         if ATTR_IMAGE in body:
             self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.override_image = (
+                self.sys_homeassistant.image != self.sys_homeassistant.default_image
+            )
 
         if ATTR_BOOT in body:
             self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -164,19 +173,22 @@ class APIHomeAssistant(CoreSysAttributes):
         return asyncio.shield(self.sys_homeassistant.core.start())
 
     @api_process
-    def restart(self, request: web.Request) -> Awaitable[None]:
+    async def restart(self, request: web.Request) -> None:
         """Restart Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.restart())
+        body = await api_validate(SCHEMA_RESTART, request)
+
+        await asyncio.shield(
+            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
+        )
 
     @api_process
-    def rebuild(self, request: web.Request) -> Awaitable[None]:
+    async def rebuild(self, request: web.Request) -> None:
         """Rebuild Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.rebuild())
-
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Home Assistant Docker logs."""
-        return self.sys_homeassistant.core.logs()
+        body = await api_validate(SCHEMA_RESTART, request)
+
+        await asyncio.shield(
+            self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
+        )
 
     @api_process
     async def check(self, request: web.Request) -> None:
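Because ATTR_SAFE_MODE is vol.Optional with default=False, clients that POST an empty body to restart or rebuild keep their old behavior; safe mode is strictly opt-in. A quick sketch of how the schema resolves bodies (voluptuous only, constant inlined):

    import voluptuous as vol

    SCHEMA_RESTART = vol.Schema(
        {vol.Optional("safe_mode", default=False): vol.Boolean()}
    )

    print(SCHEMA_RESTART({}))                     # {'safe_mode': False}
    print(SCHEMA_RESTART({"safe_mode": "true"}))  # {'safe_mode': True}; Boolean() coerces strings
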
@@ -1,4 +1,5 @@
 """Init file for Supervisor host RESTful API."""
+
 import asyncio
 from contextlib import suppress
 import logging
@@ -28,7 +29,14 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, HostLogError
-from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
+from ..host.const import (
+    PARAM_BOOT_ID,
+    PARAM_FOLLOW,
+    PARAM_SYSLOG_IDENTIFIER,
+    LogFormat,
+    LogFormatter,
+)
+from ..utils.systemd_journal import journal_logs_reader
 from .const import (
     ATTR_AGENT_VERSION,
     ATTR_APPARMOR_VERSION,
@@ -42,9 +50,11 @@ from .const import (
     ATTR_LLMNR_HOSTNAME,
     ATTR_STARTUP_TIME,
     ATTR_USE_NTP,
+    ATTR_VIRTUALIZATION,
     CONTENT_TYPE_TEXT,
+    CONTENT_TYPE_X_LOG,
 )
-from .utils import api_process, api_validate
+from .utils import api_process, api_process_raw, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -65,6 +75,7 @@ class APIHost(CoreSysAttributes):
             ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
             ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
             ATTR_CHASSIS: self.sys_host.info.chassis,
+            ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
             ATTR_CPE: self.sys_host.info.cpe,
             ATTR_DEPLOYMENT: self.sys_host.info.deployment,
             ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -153,11 +164,11 @@ class APIHost(CoreSysAttributes):
                 raise APIError() from err
         return possible_offset
 
-    @api_process
-    async def advanced_logs(
+    async def advanced_logs_handler(
         self, request: web.Request, identifier: str | None = None, follow: bool = False
     ) -> web.StreamResponse:
         """Return systemd-journald logs."""
+        log_formatter = LogFormatter.PLAIN
         params = {}
         if identifier:
             params[PARAM_SYSLOG_IDENTIFIER] = identifier
@@ -165,6 +176,8 @@ class APIHost(CoreSysAttributes):
             params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
         else:
             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
+            # host logs should be always verbose, no matter what Accept header is used
+            log_formatter = LogFormatter.VERBOSE
 
         if BOOTID in request.match_info:
             params[PARAM_BOOT_ID] = await self._get_boot_id(
@@ -175,28 +188,40 @@ class APIHost(CoreSysAttributes):
 
         if ACCEPT in request.headers and request.headers[ACCEPT] not in [
             CONTENT_TYPE_TEXT,
+            CONTENT_TYPE_X_LOG,
             "*/*",
         ]:
             raise APIError(
-                "Invalid content type requested. Only text/plain supported for now."
+                "Invalid content type requested. Only text/plain and text/x-log "
+                "supported for now."
             )
 
+        if request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
+            log_formatter = LogFormatter.VERBOSE
+
         if RANGE in request.headers:
             range_header = request.headers.get(RANGE)
         else:
             range_header = f"entries=:-{DEFAULT_RANGE}:"
 
         async with self.sys_host.logs.journald_logs(
-            params=params, range_header=range_header
+            params=params, range_header=range_header, accept=LogFormat.JOURNAL
        ) as resp:
             try:
                 response = web.StreamResponse()
                 response.content_type = CONTENT_TYPE_TEXT
                 await response.prepare(request)
-                async for data in resp.content:
-                    await response.write(data)
+                async for line in journal_logs_reader(resp, log_formatter):
+                    await response.write(line.encode("utf-8") + b"\n")
             except ConnectionResetError as ex:
                 raise APIError(
                     "Connection reset when trying to fetch data from systemd-journald."
                 ) from ex
             return response
+
+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+    async def advanced_logs(
+        self, request: web.Request, identifier: str | None = None, follow: bool = False
+    ) -> web.StreamResponse:
+        """Return systemd-journald logs. Wrapped as standard API handler."""
+        return await self.advanced_logs_handler(request, identifier, follow)
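From a client's perspective, the verbose formatter is chosen via the Accept header. A hedged sketch of an add-on fetching host logs, assuming the usual http://supervisor/host/logs endpoint and the SUPERVISOR_TOKEN environment variable that add-ons receive:

    import asyncio
    import os

    import aiohttp


    async def fetch_verbose_host_logs() -> None:
        """Request host logs; Accept: text/x-log selects LogFormatter.VERBOSE."""
        headers = {
            "Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}",
            "Accept": "text/x-log",
        }
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "http://supervisor/host/logs", headers=headers
            ) as resp:
                print(await resp.text())


    asyncio.run(fetch_verbose_host_logs())
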
@@ -6,6 +6,7 @@ from aiohttp import web
 import voluptuous as vol
 
 from ..coresys import CoreSysAttributes
+from ..exceptions import APIError
 from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
 from .const import ATTR_JOBS
@@ -21,7 +22,7 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""
 
-    def _list_jobs(self) -> list[dict[str, Any]]:
+    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
         """Return current job tree."""
         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
         for job in self.sys_jobs.jobs:
@@ -34,9 +35,11 @@ class APIJobs(CoreSysAttributes):
                 jobs_by_parent[job.parent_id].append(job)
 
         job_list: list[dict[str, Any]] = []
-        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
-            (job_list, job) for job in jobs_by_parent.get(None, [])
-        ]
+        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
+            [(job_list, start)]
+            if start
+            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
+        )
 
         while queue:
             (current_list, current_job) = queue.pop(0)
@@ -78,3 +81,19 @@ class APIJobs(CoreSysAttributes):
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
         self.sys_jobs.reset_data()
+
+    @api_process
+    async def job_info(self, request: web.Request) -> dict[str, Any]:
+        """Get details of a job by ID."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+        return self._list_jobs(job)[0]
+
+    @api_process
+    async def remove_job(self, request: web.Request) -> None:
+        """Remove a completed job."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+
+        if not job.done:
+            raise APIError(f"Job {job.uuid} is not done!")
+
+        self.sys_jobs.remove_job(job)
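The new start parameter lets the same breadth-first walk serve both the full tree and one job's subtree, which is what job_info relies on. A self-contained sketch of the traversal over plain dicts (job names invented for illustration):

    from typing import Any

    # parent -> children; None marks the roots
    jobs_by_parent: dict[str | None, list[str]] = {
        None: ["backup_full"],
        "backup_full": ["backup_addons", "backup_folders"],
    }


    def list_jobs(start: str | None = None) -> list[dict[str, Any]]:
        """Flatten the job tree into nested dicts, optionally from a single job."""
        job_list: list[dict[str, Any]] = []
        queue = (
            [(job_list, start)]
            if start
            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
        )
        while queue:
            current_list, current_job = queue.pop(0)
            child_jobs: list[dict[str, Any]] = []
            current_list.append({"name": current_job, "child_jobs": child_jobs})
            queue.extend(
                (child_jobs, child) for child in jobs_by_parent.get(current_job, [])
            )
        return job_list


    print(list_jobs())               # whole tree from the roots
    print(list_jobs("backup_full"))  # single subtree, as job_info returns
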
@@ -103,6 +103,8 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
         r"|/audio/.+"
         r"|/auth/cache"
+        r"|/available_updates"
+        r"|/backups.*"
         r"|/cli/.+"
         r"|/core/.+"
         r"|/dns/.+"
@@ -112,16 +114,17 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/hassos/.+"
         r"|/homeassistant/.+"
         r"|/host/.+"
+        r"|/mounts.*"
         r"|/multicast/.+"
         r"|/network/.+"
         r"|/observer/.+"
-        r"|/os/.+"
+        r"|/os/(?!datadisk/wipe).+"
+        r"|/refresh_updates"
         r"|/resolution/.+"
-        r"|/backups.*"
+        r"|/security/.+"
         r"|/snapshots.*"
         r"|/store.*"
         r"|/supervisor/.+"
-        r"|/security/.+"
         r")$"
     ),
     ROLE_ADMIN: re.compile(
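The negative lookahead keeps every other /os route available to this role while carving out the destructive wipe endpoint. A quick standalone check of the pattern fragment:

    import re

    pattern = re.compile(r"^/os/(?!datadisk/wipe).+$")

    print(bool(pattern.match("/os/info")))           # True, still allowed
    print(bool(pattern.match("/os/datadisk/wipe")))  # False, admin only now
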
@@ -23,8 +23,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -69,11 +68,6 @@ class APIMulticast(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.multicast.update(version))
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Multicast Docker logs."""
-        return self.sys_plugins.multicast.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Multicast plugin."""
@@ -19,6 +19,7 @@ from ..const import (
     ATTR_POWER_LED,
     ATTR_SERIAL,
     ATTR_SIZE,
+    ATTR_STATE,
     ATTR_UPDATE_AVAILABLE,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
@@ -28,13 +29,17 @@ from ..exceptions import BoardInvalidError
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..validate import version_tag
 from .const import (
+    ATTR_BOOT_SLOT,
+    ATTR_BOOT_SLOTS,
     ATTR_DATA_DISK,
     ATTR_DEV_PATH,
     ATTR_DEVICE,
     ATTR_DISKS,
     ATTR_MODEL,
+    ATTR_STATUS,
     ATTR_SYSTEM_HEALTH_LED,
     ATTR_VENDOR,
+    BootSlot,
 )
 from .utils import api_process, api_validate
 
@@ -42,6 +47,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 # pylint: disable=no-value-for-parameter
 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
 SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
 
 SCHEMA_YELLOW_OPTIONS = vol.Schema(
@@ -74,6 +80,15 @@ class APIOS(CoreSysAttributes):
             ATTR_BOARD: self.sys_os.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
             ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
+            ATTR_BOOT_SLOTS: {
+                slot.bootname: {
+                    ATTR_STATE: slot.state,
+                    ATTR_STATUS: slot.boot_status,
+                    ATTR_VERSION: slot.bundle_version,
+                }
+                for slot in self.sys_os.slots
+                if slot.bootname
+            },
         }
 
     @api_process
@@ -96,6 +111,17 @@ class APIOS(CoreSysAttributes):
 
         await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
 
+    @api_process
+    def wipe_data(self, request: web.Request) -> Awaitable[None]:
+        """Trigger data disk wipe on Host."""
+        return asyncio.shield(self.sys_os.datadisk.wipe_disk())
+
+    @api_process
+    async def set_boot_slot(self, request: web.Request) -> None:
+        """Change the active boot slot and reboot into it."""
+        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
+        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
+
     @api_process
     async def list_data(self, request: web.Request) -> dict[str, Any]:
         """Return possible data targets."""
@@ -130,13 +156,17 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
 
         if ATTR_ACTIVITY_LED in body:
-            self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
+            await self.sys_dbus.agent.board.green.set_activity_led(
+                body[ATTR_ACTIVITY_LED]
+            )
 
         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
 
         if ATTR_SYSTEM_HEALTH_LED in body:
-            self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
+            await self.sys_dbus.agent.board.green.set_user_led(
+                body[ATTR_SYSTEM_HEALTH_LED]
+            )
 
         self.sys_dbus.agent.board.green.save_data()
 
@@ -155,13 +185,15 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
 
         if ATTR_DISK_LED in body:
-            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
+            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])
 
         if ATTR_HEARTBEAT_LED in body:
-            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
+            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
+                body[ATTR_HEARTBEAT_LED]
+            )
 
         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
 
         self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
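Client-side, the two new handlers are plain POSTs. A hedged sketch using requests: the /os/datadisk/wipe path is corroborated by the security regex carve-out above, while the /os/boot_slot path and the SUPERVISOR_TOKEN variable are assumptions here:

    import os

    import requests

    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}

    # switch the active boot slot and reboot into it (path assumed)
    requests.post(
        "http://supervisor/os/boot_slot",
        json={"boot_slot": "B"},
        headers=headers,
        timeout=10,
    )

    # trigger a data disk wipe on the host (admin-only per the regex change)
    requests.post("http://supervisor/os/datadisk/wipe", headers=headers, timeout=10)
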
@@ -14,6 +14,7 @@ from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
 
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError
+from ..utils.json import json_dumps
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -145,7 +146,8 @@ class APIProxy(CoreSysAttributes):
                 {
                     "type": "auth",
                     "access_token": self.sys_homeassistant.api.access_token,
-                }
+                },
+                dumps=json_dumps,
             )
 
             data = await client.receive_json()
@@ -184,6 +186,9 @@ class APIProxy(CoreSysAttributes):
             return await target.send_str(msg.data)
         if msg.type == WSMsgType.BINARY:
             return await target.send_bytes(msg.data)
+        if msg.type == WSMsgType.CLOSE:
+            _LOGGER.debug("Received close message from WebSocket.")
+            return await target.close()
 
         raise TypeError(
             f"Cannot proxy websocket message of unsupported type: {msg.type}"
@@ -198,11 +203,13 @@ class APIProxy(CoreSysAttributes):
         # init server
         server = web.WebSocketResponse(heartbeat=30)
         await server.prepare(request)
+        addon_name = None
 
         # handle authentication
         try:
             await server.send_json(
-                {"type": "auth_required", "ha_version": self.sys_homeassistant.version}
+                {"type": "auth_required", "ha_version": self.sys_homeassistant.version},
+                dumps=json_dumps,
             )
 
             # Check API access
@@ -215,14 +222,17 @@ class APIProxy(CoreSysAttributes):
             if not addon or not addon.access_homeassistant_api:
                 _LOGGER.warning("Unauthorized WebSocket access!")
                 await server.send_json(
-                    {"type": "auth_invalid", "message": "Invalid access"}
+                    {"type": "auth_invalid", "message": "Invalid access"},
+                    dumps=json_dumps,
                 )
                 return server
 
-            _LOGGER.info("WebSocket access from %s", addon.slug)
+            addon_name = addon.slug
+            _LOGGER.info("WebSocket access from %s", addon_name)
 
             await server.send_json(
-                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version}
+                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version},
+                dumps=json_dumps,
             )
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
@@ -277,5 +287,5 @@ class APIProxy(CoreSysAttributes):
             if not server.closed:
                 await server.close()
 
-        _LOGGER.info("Home Assistant WebSocket API connection is closed")
+        _LOGGER.info("Home Assistant WebSocket API for %s closed", addon_name)
         return server
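The repeated dumps=json_dumps arguments swap aiohttp's stdlib JSON serializer for Supervisor's own helper on every send_json call. A sketch of the assumed shape of that helper, wrapping orjson (which returns bytes, while aiohttp's dumps hook expects str):

    from typing import Any

    import orjson


    def json_dumps(data: Any) -> str:
        """Assumed shape of ..utils.json.json_dumps: orjson output decoded to str."""
        return orjson.dumps(data).decode("utf-8")


    print(json_dumps({"type": "auth_ok", "ha_version": "2023.12.0"}))
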
@@ -6,7 +6,7 @@ from typing import Any
 from aiohttp import web
 import voluptuous as vol
 
-from ..addons import AnyAddon
+from ..addons.manager import AnyAddon
 from ..addons.utils import rating_security
 from ..api.const import ATTR_SIGNED
 from ..api.utils import api_process, api_process_raw, api_validate
@@ -249,9 +249,14 @@ class APIStore(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_TEXT)
     async def addons_addon_changelog(self, request: web.Request) -> str:
         """Return changelog from add-on."""
-        addon = self._extract_addon(request)
+        # Frontend can't handle error response here, need to return 200 and error as text for now
+        try:
+            addon = self._extract_addon(request)
+        except APIError as err:
+            return str(err)
+
         if not addon.with_changelog:
-            raise APIError(f"No changelog found for add-on {addon.slug}!")
+            return f"No changelog found for add-on {addon.slug}!"
 
         with addon.path_changelog.open("r") as changelog:
             return changelog.read()
@@ -259,9 +264,14 @@ class APIStore(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_TEXT)
     async def addons_addon_documentation(self, request: web.Request) -> str:
         """Return documentation from add-on."""
-        addon = self._extract_addon(request)
+        # Frontend can't handle error response here, need to return 200 and error as text for now
+        try:
+            addon = self._extract_addon(request)
+        except APIError as err:
+            return str(err)
+
         if not addon.with_documentation:
-            raise APIError(f"No documentation found for add-on {addon.slug}!")
+            return f"No documentation found for add-on {addon.slug}!"
 
         with addon.path_documentation.open("r") as documentation:
             return documentation.read()
@@ -49,7 +49,7 @@ from ..store.validate import repositories
 from ..utils.sentry import close_sentry, init_sentry
 from ..utils.validate import validate_timezone
 from ..validate import version_tag, wait_boot
-from .const import CONTENT_TYPE_BINARY
+from .const import CONTENT_TYPE_TEXT
 from .utils import api_process, api_process_raw, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -140,7 +140,7 @@ class APISupervisor(CoreSysAttributes):
 
         if ATTR_DIAGNOSTICS in body:
             self.sys_config.diagnostics = body[ATTR_DIAGNOSTICS]
-            self.sys_dbus.agent.diagnostics = body[ATTR_DIAGNOSTICS]
+            await self.sys_dbus.agent.set_diagnostics(body[ATTR_DIAGNOSTICS])
 
             if body[ATTR_DIAGNOSTICS]:
                 init_sentry(self.coresys)
@@ -229,7 +229,7 @@ class APISupervisor(CoreSysAttributes):
         """Soft restart Supervisor."""
         return asyncio.shield(self.sys_supervisor.restart())
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
     def logs(self, request: web.Request) -> Awaitable[bytes]:
         """Return supervisor Docker logs."""
         return self.sys_supervisor.logs()
@@ -13,6 +13,7 @@ from ..const import (
     HEADER_TOKEN,
     HEADER_TOKEN_OLD,
     JSON_DATA,
+    JSON_JOB_ID,
     JSON_MESSAGE,
     JSON_RESULT,
     REQUEST_FROM,
@@ -22,9 +23,9 @@ from ..const import (
 from ..coresys import CoreSys
 from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError
 from ..utils import check_exception_chain, get_message_from_exception_chain
-from ..utils.json import JSONEncoder
+from ..utils.json import json_dumps, json_loads as json_loads_util
 from ..utils.log_format import format_message
-from .const import CONTENT_TYPE_BINARY
+from . import const
 
 
 def excract_supervisor_token(request: web.Request) -> str | None:
@@ -48,7 +49,7 @@ def json_loads(data: Any) -> dict[str, Any]:
     if not data:
         return {}
     try:
-        return json.loads(data)
+        return json_loads_util(data)
     except json.JSONDecodeError as err:
         raise APIError("Invalid json") from err
 
@@ -90,7 +91,7 @@ def require_home_assistant(method):
     return wrap_api
 
 
-def api_process_raw(content):
+def api_process_raw(content, *, error_type=None):
     """Wrap content_type into function."""
 
     def wrap_method(method):
@@ -100,15 +101,15 @@ def api_process_raw(content):
             """Return api information."""
             try:
                 msg_data = await method(api, *args, **kwargs)
-                msg_type = content
-            except (APIError, APIForbidden) as err:
-                msg_data = str(err).encode()
-                msg_type = CONTENT_TYPE_BINARY
-            except HassioError:
-                msg_data = b""
-                msg_type = CONTENT_TYPE_BINARY
+            except HassioError as err:
+                return api_return_error(
+                    err, error_type=error_type or const.CONTENT_TYPE_BINARY
+                )
 
-            return web.Response(body=msg_data, content_type=msg_type)
+            if isinstance(msg_data, (web.Response, web.StreamResponse)):
+                return msg_data
+
+            return web.Response(body=msg_data, content_type=content)
 
         return wrap_api
 
@@ -116,21 +117,41 @@ def api_process_raw(content):
 
 
 def api_return_error(
-    error: Exception | None = None, message: str | None = None
+    error: Exception | None = None,
+    message: str | None = None,
+    error_type: str | None = None,
 ) -> web.Response:
     """Return an API error message."""
     if error and not message:
         message = get_message_from_exception_chain(error)
         if check_exception_chain(error, DockerAPIError):
             message = format_message(message)
+    if not message:
+        message = "Unknown error, see supervisor"
+
+    status = 400
+    if is_api_error := isinstance(error, APIError):
+        status = error.status
+
+    match error_type:
+        case const.CONTENT_TYPE_TEXT:
+            return web.Response(body=message, content_type=error_type, status=status)
+        case const.CONTENT_TYPE_BINARY:
+            return web.Response(
+                body=message.encode(), content_type=error_type, status=status
+            )
+        case _:
+            result = {
+                JSON_RESULT: RESULT_ERROR,
+                JSON_MESSAGE: message,
+            }
+            if is_api_error and error.job_id:
+                result[JSON_JOB_ID] = error.job_id
 
     return web.json_response(
-        {
-            JSON_RESULT: RESULT_ERROR,
-            JSON_MESSAGE: message or "Unknown error, see supervisor",
-        },
-        status=400,
-        dumps=lambda x: json.dumps(x, cls=JSONEncoder),
+        result,
+        status=status,
+        dumps=json_dumps,
     )
 
@@ -138,7 +159,7 @@ def api_return_ok(data: dict[str, Any] | None = None) -> web.Response:
     """Return an API ok answer."""
     return web.json_response(
         {JSON_RESULT: RESULT_OK, JSON_DATA: data or {}},
-        dumps=lambda x: json.dumps(x, cls=JSONEncoder),
+        dumps=json_dumps,
     )
 
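The reworked error path negotiates the body format from error_type and propagates the APIError status instead of a blanket 400. A standalone sketch of the dispatch; string literals stand in for the const module here because in a match statement only dotted names act as value patterns, while bare names would capture:

    from aiohttp import web


    def render_error(
        message: str, error_type: str | None = None, status: int = 400
    ) -> web.Response:
        """Standalone sketch of api_return_error's content negotiation."""
        match error_type:
            case "text/plain":
                return web.Response(
                    body=message.encode(), content_type=error_type, status=status
                )
            case "application/octet-stream":
                return web.Response(
                    body=message.encode(), content_type=error_type, status=status
                )
            case _:
                return web.json_response(
                    {"result": "error", "message": message}, status=status
                )


    resp = render_error("Version 2023.12.0 is already in use", "text/plain")
    print(resp.status, resp.content_type)  # 400 text/plain
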
@@ -2,11 +2,18 @@
 import asyncio
 import hashlib
 import logging
+from typing import Any
 
 from .addons.addon import Addon
-from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH
+from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_TYPE, ATTR_USERNAME, FILE_HASSIO_AUTH
 from .coresys import CoreSys, CoreSysAttributes
-from .exceptions import AuthError, AuthPasswordResetError, HomeAssistantAPIError
+from .exceptions import (
+    AuthError,
+    AuthListUsersError,
+    AuthPasswordResetError,
+    HomeAssistantAPIError,
+    HomeAssistantWSError,
+)
 from .utils.common import FileConfiguration
 from .validate import SCHEMA_AUTH_CONFIG
 
@@ -132,6 +139,17 @@ class Auth(FileConfiguration, CoreSysAttributes):
 
         raise AuthPasswordResetError()
 
+    async def list_users(self) -> list[dict[str, Any]]:
+        """List users on the Home Assistant instance."""
+        try:
+            return await self.sys_homeassistant.websocket.async_send_command(
+                {ATTR_TYPE: "config/auth/list"}
+            )
+        except HomeAssistantWSError:
+            _LOGGER.error("Can't request listing users on Home Assistant!")
+
+        raise AuthListUsersError()
+
     @staticmethod
     def _rehash(value: str, salt2: str = "") -> str:
         """Rehash a value."""
@@ -1,14 +1,18 @@
 """Representation of a backup file."""
 import asyncio
 from base64 import b64decode, b64encode
+from collections import defaultdict
 from collections.abc import Awaitable
+from copy import deepcopy
 from datetime import timedelta
 from functools import cached_property
+import io
 import json
 import logging
 from pathlib import Path
 import tarfile
 from tempfile import TemporaryDirectory
+import time
 from typing import Any
 
 from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
@@ -19,7 +23,7 @@ from securetar import SecureTarFile, atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
-from ..addons import Addon
+from ..addons.manager import Addon
 from ..const import (
     ATTR_ADDONS,
     ATTR_COMPRESSED,
@@ -42,11 +46,14 @@ from ..const import (
     ATTR_VERSION,
     CRYPTO_AES128,
 )
-from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import AddonsError, BackupError
+from ..coresys import CoreSys
+from ..exceptions import AddonsError, BackupError, BackupInvalidError
+from ..jobs.const import JOB_GROUP_BACKUP
+from ..jobs.decorator import Job
+from ..jobs.job_group import JobGroup
 from ..utils import remove_folder
 from ..utils.dt import parse_datetime, utcnow
-from ..utils.json import write_json_file
+from ..utils.json import json_bytes
 from .const import BUF_SIZE, BackupType
 from .utils import key_to_iv, password_to_key
 from .validate import SCHEMA_BACKUP
@@ -54,15 +61,25 @@ from .validate import SCHEMA_BACKUP
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
-class Backup(CoreSysAttributes):
+class Backup(JobGroup):
     """A single Supervisor backup."""
 
-    def __init__(self, coresys: CoreSys, tar_file: Path):
+    def __init__(
+        self,
+        coresys: CoreSys,
+        tar_file: Path,
+        slug: str,
+        data: dict[str, Any] | None = None,
+    ):
         """Initialize a backup."""
-        self.coresys: CoreSys = coresys
+        super().__init__(
+            coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug
+        )
         self._tarfile: Path = tar_file
-        self._data: dict[str, Any] = {}
+        self._data: dict[str, Any] = data or {ATTR_SLUG: slug}
         self._tmp = None
+        self._outer_secure_tarfile: SecureTarFile | None = None
+        self._outer_secure_tarfile_tarfile: tarfile.TarFile | None = None
         self._key: bytes | None = None
         self._aes: Cipher | None = None
 
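The super().__init__ call derives the job-group reference from the slug via str.format_map with a defaultdict(str), which silently drops placeholders that are not supplied instead of raising KeyError. A standalone sketch, with the JOB_GROUP_BACKUP template value assumed:

    from collections import defaultdict

    JOB_GROUP_BACKUP = "backup_{slug}"  # assumed template; the real value lives in jobs.const

    print(JOB_GROUP_BACKUP.format_map(defaultdict(str, slug="9a8f0e53")))  # backup_9a8f0e53
    print(JOB_GROUP_BACKUP.format_map(defaultdict(str)))                   # backup_ (no KeyError)
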
| @@ -87,7 +104,7 @@ class Backup(CoreSysAttributes): | |||||||
|         return self._data[ATTR_NAME] |         return self._data[ATTR_NAME] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def date(self): |     def date(self) -> str: | ||||||
|         """Return backup date.""" |         """Return backup date.""" | ||||||
|         return self._data[ATTR_DATE] |         return self._data[ATTR_DATE] | ||||||
|  |  | ||||||
| @@ -102,32 +119,32 @@ class Backup(CoreSysAttributes): | |||||||
|         return self._data[ATTR_COMPRESSED] |         return self._data[ATTR_COMPRESSED] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def addons(self): |     def addons(self) -> list[dict[str, Any]]: | ||||||
|         """Return backup date.""" |         """Return backup date.""" | ||||||
|         return self._data[ATTR_ADDONS] |         return self._data[ATTR_ADDONS] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def addon_list(self): |     def addon_list(self) -> list[str]: | ||||||
|         """Return a list of add-ons slugs.""" |         """Return a list of add-ons slugs.""" | ||||||
|         return [addon_data[ATTR_SLUG] for addon_data in self.addons] |         return [addon_data[ATTR_SLUG] for addon_data in self.addons] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def folders(self): |     def folders(self) -> list[str]: | ||||||
|         """Return list of saved folders.""" |         """Return list of saved folders.""" | ||||||
|         return self._data[ATTR_FOLDERS] |         return self._data[ATTR_FOLDERS] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def repositories(self): |     def repositories(self) -> list[str]: | ||||||
|         """Return backup date.""" |         """Return backup date.""" | ||||||
|         return self._data[ATTR_REPOSITORIES] |         return self._data[ATTR_REPOSITORIES] | ||||||
|  |  | ||||||
|     @repositories.setter |     @repositories.setter | ||||||
|     def repositories(self, value): |     def repositories(self, value: list[str]) -> None: | ||||||
|         """Set backup date.""" |         """Set backup date.""" | ||||||
|         self._data[ATTR_REPOSITORIES] = value |         self._data[ATTR_REPOSITORIES] = value | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def homeassistant_version(self): |     def homeassistant_version(self) -> AwesomeVersion: | ||||||
|         """Return backup Home Assistant version.""" |         """Return backup Home Assistant version.""" | ||||||
|         if self.homeassistant is None: |         if self.homeassistant is None: | ||||||
|             return None |             return None | ||||||
@@ -141,7 +158,7 @@ class Backup(CoreSysAttributes):
         return self.homeassistant[ATTR_EXCLUDE_DATABASE]
 
     @property
-    def homeassistant(self):
+    def homeassistant(self) -> dict[str, Any] | None:
         """Return backup Home Assistant data."""
         return self._data[ATTR_HOMEASSISTANT]
 
@@ -151,12 +168,12 @@ class Backup(CoreSysAttributes):
         return self._data[ATTR_SUPERVISOR_VERSION]
 
     @property
-    def docker(self):
+    def docker(self) -> dict[str, Any]:
         """Return backup Docker config data."""
         return self._data.get(ATTR_DOCKER, {})
 
     @docker.setter
-    def docker(self, value):
+    def docker(self, value: dict[str, Any]) -> None:
         """Set the Docker config data."""
         self._data[ATTR_DOCKER] = value
 
@@ -169,32 +186,36 @@ class Backup(CoreSysAttributes):
         return None
 
     @property
-    def size(self):
+    def size(self) -> float:
         """Return backup size."""
         if not self.tarfile.is_file():
             return 0
         return round(self.tarfile.stat().st_size / 1048576, 2)  # size in MiB
 
     @property
-    def is_new(self):
+    def is_new(self) -> bool:
         """Return True if the backup does not exist on disk yet."""
         return not self.tarfile.exists()
 
     @property
-    def tarfile(self):
+    def tarfile(self) -> Path:
         """Return path to backup tarfile."""
         return self._tarfile
 
     @property
-    def is_current(self):
+    def is_current(self) -> bool:
         """Return true if backup is current, false if stale."""
         return parse_datetime(self.date) >= utcnow() - timedelta(
             days=self.sys_backups.days_until_stale
         )
 
+    @property
+    def data(self) -> dict[str, Any]:
+        """Return a copy of the backup data."""
+        return deepcopy(self._data)
+
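Note on the new data property: it hands out a deepcopy, so callers (the manager below rebuilds Backup objects from it) cannot mutate the internal metadata dict. A minimal standalone illustration of the aliasing problem the copy avoids; the class here is hypothetical:

    from copy import deepcopy

    class Holder:
        def __init__(self) -> None:
            self._data = {"addons": []}

        @property
        def data(self) -> dict:
            # Hand out a deep copy: mutations on it never reach _data
            return deepcopy(self._data)

    holder = Holder()
    holder.data["addons"].append("core_ssh")  # mutates only the copy
    assert holder._data == {"addons": []}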
     def new(
         self,
-        slug: str,
         name: str,
         date: str,
         sys_type: BackupType,
@@ -204,7 +225,6 @@ class Backup(CoreSysAttributes):
         """Initialize a new backup."""
         # Init metadata
         self._data[ATTR_VERSION] = 2
-        self._data[ATTR_SLUG] = slug
         self._data[ATTR_NAME] = name
         self._data[ATTR_DATE] = date
         self._data[ATTR_TYPE] = sys_type
@@ -305,25 +325,55 @@ class Backup(CoreSysAttributes):
 
     async def __aenter__(self):
         """Async context to open a backup."""
-        self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
 
         # create a backup
         if not self.tarfile.is_file():
-            return self
+            self._outer_secure_tarfile = SecureTarFile(
+                self.tarfile,
+                "w",
+                gzip=False,
+                bufsize=BUF_SIZE,
+            )
+            self._outer_secure_tarfile_tarfile = self._outer_secure_tarfile.__enter__()
+            return
 
         # extract an existing backup
+        self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
 
         def _extract_backup():
             """Extract a backup."""
             with tarfile.open(self.tarfile, "r:") as tar:
-                tar.extractall(path=self._tmp.name, members=secure_path(tar))
+                tar.extractall(
+                    path=self._tmp.name,
+                    members=secure_path(tar),
+                    filter="fully_trusted",
+                )
 
         await self.sys_run_in_executor(_extract_backup)
 
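Note on filter="fully_trusted": this opts into the tarfile extraction-filter API from PEP 706 (new in Python 3.12 and backported to older 3.x patch releases). Passing a filter explicitly keeps the pre-3.12 extraction behavior and avoids the DeprecationWarning emitted when none is given; member sanitizing is already done by secure_path here. A sketch of the choice, under the assumption that the archive has been pre-screened:

    import tarfile

    def extract(archive: str, dest: str, prescreened: bool) -> None:
        with tarfile.open(archive, "r:") as tar:
            # "data" refuses absolute paths, escaping links and device
            # nodes; "fully_trusted" keeps the legacy extract-everything
            # behavior for archives vetted by other means.
            tar.extractall(path=dest, filter="fully_trusted" if prescreened else "data")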
     async def __aexit__(self, exception_type, exception_value, traceback):
         """Async context to close a backup."""
         # exists backup or exception on build
-        if self.tarfile.is_file() or exception_type is not None:
-            self._tmp.cleanup()
+        try:
+            await self._aexit(exception_type, exception_value, traceback)
+        finally:
+            if self._tmp:
+                self._tmp.cleanup()
+            if self._outer_secure_tarfile:
+                self._outer_secure_tarfile.__exit__(
+                    exception_type, exception_value, traceback
+                )
+                self._outer_secure_tarfile = None
+                self._outer_secure_tarfile_tarfile = None
+
+    async def _aexit(self, exception_type, exception_value, traceback):
+        """Clean up after backup creation.
+
+        This is a separate method to allow it to be called from __aexit__ to ensure
+        that cleanup is always performed, even if an exception is raised.
+        """
+        # If we're not creating a new backup, or if an exception was raised, we're done
+        if not self._outer_secure_tarfile or exception_type is not None:
             return
 
         # validate data
@@ -336,157 +386,254 @@ class Backup(CoreSysAttributes):
             raise ValueError("Invalid config") from None
 
         # new backup, build it
-        def _create_backup():
-            """Create a new backup."""
-            with tarfile.open(self.tarfile, "w:") as tar:
-                tar.add(self._tmp.name, arcname=".")
+        def _add_backup_json():
+            """Add backup.json metadata to the archive."""
+            raw_bytes = json_bytes(self._data)
+            fileobj = io.BytesIO(raw_bytes)
+            tar_info = tarfile.TarInfo(name="./backup.json")
+            tar_info.size = len(raw_bytes)
+            tar_info.mtime = int(time.time())
+            self._outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj)
 
         try:
-            write_json_file(Path(self._tmp.name, "backup.json"), self._data)
-            await self.sys_run_in_executor(_create_backup)
+            await self.sys_run_in_executor(_add_backup_json)
         except (OSError, json.JSONDecodeError) as err:
+            self.sys_jobs.current.capture_error(BackupError("Can't write backup"))
             _LOGGER.error("Can't write backup: %s", err)
-        finally:
-            self._tmp.cleanup()
 
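Note on _add_backup_json: backup.json is no longer staged in a temporary directory; the serialized metadata is added to the outer tar straight from memory via TarInfo and addfile. The same pattern with only the stdlib (json stands in for Supervisor's json_bytes):

    import io
    import json
    import tarfile
    import time

    def add_json_member(tar: tarfile.TarFile, name: str, payload: dict) -> None:
        raw = json.dumps(payload).encode("utf-8")
        info = tarfile.TarInfo(name=name)
        info.size = len(raw)           # must match the fileobj length exactly
        info.mtime = int(time.time())
        tar.addfile(info, fileobj=io.BytesIO(raw))

    with tarfile.open("backup.tar", "w:") as tar:
        add_json_member(tar, "./backup.json", {"version": 2})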
|  |     @Job(name="backup_addon_save", cleanup=False) | ||||||
|  |     async def _addon_save(self, addon: Addon) -> asyncio.Task | None: | ||||||
|  |         """Store an add-on into backup.""" | ||||||
|  |         self.sys_jobs.current.reference = addon.slug | ||||||
|  |  | ||||||
|  |         tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}" | ||||||
|  |  | ||||||
|  |         addon_file = self._outer_secure_tarfile.create_inner_tar( | ||||||
|  |             f"./{tar_name}", | ||||||
|  |             gzip=self.compressed, | ||||||
|  |             key=self._key, | ||||||
|  |         ) | ||||||
|  |         # Take backup | ||||||
|  |         try: | ||||||
|  |             start_task = await addon.backup(addon_file) | ||||||
|  |         except AddonsError as err: | ||||||
|  |             raise BackupError( | ||||||
|  |                 f"Can't create backup for {addon.slug}", _LOGGER.error | ||||||
|  |             ) from err | ||||||
|  |  | ||||||
|  |         # Store to config | ||||||
|  |         self._data[ATTR_ADDONS].append( | ||||||
|  |             { | ||||||
|  |                 ATTR_SLUG: addon.slug, | ||||||
|  |                 ATTR_NAME: addon.name, | ||||||
|  |                 ATTR_VERSION: addon.version, | ||||||
|  |                 ATTR_SIZE: addon_file.size, | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         return start_task | ||||||
|  |  | ||||||
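Note on create_inner_tar: add-on archives are now written as inner tars nested directly inside the streaming outer archive instead of as loose files in a temp dir, so only the inner layer is compressed and (when a key is set) encrypted. A hedged sketch of that layout, assuming securetar's API as it is used in this diff:

    import tempfile
    from pathlib import Path
    from securetar import SecureTarFile, atomic_contents_add

    src = Path(tempfile.mkdtemp())           # stand-in for a data folder
    (src / "example.txt").write_text("hi")

    outer = SecureTarFile(Path("backup.tar"), "w", gzip=False)  # plain outer tar
    with outer:
        # key=None: unencrypted inner tar; a 16-byte key enables encryption
        with outer.create_inner_tar("./data.tar.gz", gzip=True, key=None) as inner:
            atomic_contents_add(inner, src, excludes=[], arcname=".")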
|  |     @Job(name="backup_store_addons", cleanup=False) | ||||||
|     async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]: |     async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]: | ||||||
|         """Add a list of add-ons into backup. |         """Add a list of add-ons into backup. | ||||||
|  |  | ||||||
|         For each addon that needs to be started after backup, returns a Task which |         For each addon that needs to be started after backup, returns a Task which | ||||||
|         completes when that addon has state 'started' (see addon.start). |         completes when that addon has state 'started' (see addon.start). | ||||||
|         """ |         """ | ||||||
|  |         # Save Add-ons sequential avoid issue on slow IO | ||||||
|         async def _addon_save(addon: Addon) -> asyncio.Task | None: |  | ||||||
|             """Task to store an add-on into backup.""" |  | ||||||
|             tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}" |  | ||||||
|             addon_file = SecureTarFile( |  | ||||||
|                 Path(self._tmp.name, tar_name), |  | ||||||
|                 "w", |  | ||||||
|                 key=self._key, |  | ||||||
|                 gzip=self.compressed, |  | ||||||
|                 bufsize=BUF_SIZE, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             # Take backup |  | ||||||
|             try: |  | ||||||
|                 start_task = await addon.backup(addon_file) |  | ||||||
|             except AddonsError: |  | ||||||
|                 _LOGGER.error("Can't create backup for %s", addon.slug) |  | ||||||
|                 return |  | ||||||
|  |  | ||||||
|             # Store to config |  | ||||||
|             self._data[ATTR_ADDONS].append( |  | ||||||
|                 { |  | ||||||
|                     ATTR_SLUG: addon.slug, |  | ||||||
|                     ATTR_NAME: addon.name, |  | ||||||
|                     ATTR_VERSION: addon.version, |  | ||||||
|                     ATTR_SIZE: addon_file.size, |  | ||||||
|                 } |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             return start_task |  | ||||||
|  |  | ||||||
|         # Save Add-ons sequential |  | ||||||
|         # avoid issue on slow IO |  | ||||||
|         start_tasks: list[asyncio.Task] = [] |         start_tasks: list[asyncio.Task] = [] | ||||||
|         for addon in addon_list: |         for addon in addon_list: | ||||||
|             try: |             try: | ||||||
|                 if start_task := await _addon_save(addon): |                 if start_task := await self._addon_save(addon): | ||||||
|                     start_tasks.append(start_task) |                     start_tasks.append(start_task) | ||||||
|             except Exception as err:  # pylint: disable=broad-except |             except Exception as err:  # pylint: disable=broad-except | ||||||
|                 _LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err) |                 _LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err) | ||||||
|  |  | ||||||
|         return start_tasks |         return start_tasks | ||||||
|  |  | ||||||
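Note on the save loop: pulling _addon_save out of the closure and decorating it with @Job gives every add-on its own named job (with the slug as job reference), while the loop still runs strictly one at a time because of slow I/O. The collect-start-tasks shape, reduced to a runnable toy with hypothetical names:

    import asyncio

    async def save(name: str) -> asyncio.Task | None:
        await asyncio.sleep(0)              # stand-in for slow tar I/O
        if name.startswith("run_"):         # pretend this add-on must restart
            return asyncio.create_task(asyncio.sleep(0))
        return None

    async def main() -> None:
        start_tasks = []
        for name in ["run_ssh", "static_cfg"]:
            # awaited one at a time; the walrus keeps only truthy tasks
            if task := await save(name):
                start_tasks.append(task)
        await asyncio.gather(*start_tasks)

    asyncio.run(main())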
-    async def restore_addons(self, addon_list: list[str]) -> list[asyncio.Task]:
+    @Job(name="backup_addon_restore", cleanup=False)
+    async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None:
+        """Restore an add-on from backup."""
+        self.sys_jobs.current.reference = addon_slug
+
+        tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
+        addon_file = SecureTarFile(
+            Path(self._tmp.name, tar_name),
+            "r",
+            key=self._key,
+            gzip=self.compressed,
+            bufsize=BUF_SIZE,
+        )
+
+        # If exists inside backup
+        if not addon_file.path.exists():
+            raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)
+
+        # Perform a restore
+        try:
+            return await self.sys_addons.restore(addon_slug, addon_file)
+        except AddonsError as err:
+            raise BackupError(
+                f"Can't restore backup {addon_slug}", _LOGGER.error
+            ) from err
+
+    @Job(name="backup_restore_addons", cleanup=False)
+    async def restore_addons(
+        self, addon_list: list[str]
+    ) -> tuple[bool, list[asyncio.Task]]:
         """Restore a list of add-ons from backup."""
-        async def _addon_restore(addon_slug: str) -> asyncio.Task | None:
-            """Task to restore an add-on into backup."""
-            tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
-            addon_file = SecureTarFile(
-                Path(self._tmp.name, tar_name),
-                "r",
-                key=self._key,
-                gzip=self.compressed,
-                bufsize=BUF_SIZE,
-            )
-
-            # If exists inside backup
-            if not addon_file.path.exists():
-                _LOGGER.error("Can't find backup %s", addon_slug)
-                return
-
-            # Perform a restore
-            try:
-                return await self.sys_addons.restore(addon_slug, addon_file)
-            except AddonsError:
-                _LOGGER.error("Can't restore backup %s", addon_slug)
-
-        # Save Add-ons sequential
-        # avoid issue on slow IO
+        # Restore add-ons sequentially to avoid issues on slow IO
         start_tasks: list[asyncio.Task] = []
+        success = True
         for slug in addon_list:
             try:
-                if start_task := await _addon_restore(slug):
-                    start_tasks.append(start_task)
+                start_task = await self._addon_restore(slug)
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.warning("Can't restore Add-on %s: %s", slug, err)
+                success = False
+            else:
+                if start_task:
+                    start_tasks.append(start_task)
 
-        return start_tasks
+        return (success, start_tasks)
 
|  |     @Job(name="backup_remove_delta_addons", cleanup=False) | ||||||
|  |     async def remove_delta_addons(self) -> bool: | ||||||
|  |         """Remove addons which are not in this backup.""" | ||||||
|  |         success = True | ||||||
|  |         for addon in self.sys_addons.installed: | ||||||
|  |             if addon.slug in self.addon_list: | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             # Remove Add-on because it's not a part of the new env | ||||||
|  |             # Do it sequential avoid issue on slow IO | ||||||
|  |             try: | ||||||
|  |                 await self.sys_addons.uninstall(addon.slug) | ||||||
|  |             except AddonsError as err: | ||||||
|  |                 self.sys_jobs.current.capture_error(err) | ||||||
|  |                 _LOGGER.warning("Can't uninstall Add-on %s: %s", addon.slug, err) | ||||||
|  |                 success = False | ||||||
|  |  | ||||||
|  |         return success | ||||||
|  |  | ||||||
|  |     @Job(name="backup_folder_save", cleanup=False) | ||||||
|  |     async def _folder_save(self, name: str): | ||||||
|  |         """Take backup of a folder.""" | ||||||
|  |         self.sys_jobs.current.reference = name | ||||||
|  |  | ||||||
|  |         slug_name = name.replace("/", "_") | ||||||
|  |         tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}" | ||||||
|  |         origin_dir = Path(self.sys_config.path_supervisor, name) | ||||||
|  |  | ||||||
|  |         # Check if exists | ||||||
|  |         if not origin_dir.is_dir(): | ||||||
|  |             _LOGGER.warning("Can't find backup folder %s", name) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         def _save() -> None: | ||||||
|  |             # Take backup | ||||||
|  |             _LOGGER.info("Backing up folder %s", name) | ||||||
|  |  | ||||||
|  |             with self._outer_secure_tarfile.create_inner_tar( | ||||||
|  |                 f"./{tar_name}", | ||||||
|  |                 gzip=self.compressed, | ||||||
|  |                 key=self._key, | ||||||
|  |             ) as tar_file: | ||||||
|  |                 atomic_contents_add( | ||||||
|  |                     tar_file, | ||||||
|  |                     origin_dir, | ||||||
|  |                     excludes=[ | ||||||
|  |                         bound.bind_mount.local_where.as_posix() | ||||||
|  |                         for bound in self.sys_mounts.bound_mounts | ||||||
|  |                         if bound.bind_mount.local_where | ||||||
|  |                     ], | ||||||
|  |                     arcname=".", | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |             _LOGGER.info("Backup folder %s done", name) | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             await self.sys_run_in_executor(_save) | ||||||
|  |         except (tarfile.TarError, OSError) as err: | ||||||
|  |             raise BackupError( | ||||||
|  |                 f"Can't backup folder {name}: {str(err)}", _LOGGER.error | ||||||
|  |             ) from err | ||||||
|  |  | ||||||
|  |         self._data[ATTR_FOLDERS].append(name) | ||||||
|  |  | ||||||
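Note on the excludes list: folders are archived with atomic_contents_add, skipping the local paths of bound mounts so that network shares mounted underneath a folder are not tarred into the backup. A small hypothetical helper showing the path logic (the diff passes all bound-mount paths; tar simply ignores excludes that do not match):

    from pathlib import Path

    def mount_excludes(origin: Path, mount_points: list[Path]) -> list[str]:
        # Only mounts living under the folder being archived matter
        return [m.as_posix() for m in mount_points if m.is_relative_to(origin)]

    print(mount_excludes(Path("/data/media"), [Path("/data/media/nas")]))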
|  |     @Job(name="backup_store_folders", cleanup=False) | ||||||
|     async def store_folders(self, folder_list: list[str]): |     async def store_folders(self, folder_list: list[str]): | ||||||
|         """Backup Supervisor data into backup.""" |         """Backup Supervisor data into backup.""" | ||||||
|  |         # Save folder sequential avoid issue on slow IO | ||||||
|         async def _folder_save(name: str): |  | ||||||
|             """Take backup of a folder.""" |  | ||||||
|             slug_name = name.replace("/", "_") |  | ||||||
|             tar_name = Path( |  | ||||||
|                 self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}" |  | ||||||
|             ) |  | ||||||
|             origin_dir = Path(self.sys_config.path_supervisor, name) |  | ||||||
|  |  | ||||||
|             # Check if exists |  | ||||||
|             if not origin_dir.is_dir(): |  | ||||||
|                 _LOGGER.warning("Can't find backup folder %s", name) |  | ||||||
|                 return |  | ||||||
|  |  | ||||||
|             def _save() -> None: |  | ||||||
|                 # Take backup |  | ||||||
|                 _LOGGER.info("Backing up folder %s", name) |  | ||||||
|                 with SecureTarFile( |  | ||||||
|                     tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE |  | ||||||
|                 ) as tar_file: |  | ||||||
|                     atomic_contents_add( |  | ||||||
|                         tar_file, |  | ||||||
|                         origin_dir, |  | ||||||
|                         excludes=[ |  | ||||||
|                             bound.bind_mount.local_where.as_posix() |  | ||||||
|                             for bound in self.sys_mounts.bound_mounts |  | ||||||
|                             if bound.bind_mount.local_where |  | ||||||
|                         ], |  | ||||||
|                         arcname=".", |  | ||||||
|                     ) |  | ||||||
|  |  | ||||||
|                 _LOGGER.info("Backup folder %s done", name) |  | ||||||
|  |  | ||||||
|             await self.sys_run_in_executor(_save) |  | ||||||
|             self._data[ATTR_FOLDERS].append(name) |  | ||||||
|  |  | ||||||
|         # Save folder sequential |  | ||||||
|         # avoid issue on slow IO |  | ||||||
|         for folder in folder_list: |         for folder in folder_list: | ||||||
|  |             await self._folder_save(folder) | ||||||
|  |  | ||||||
|  |     @Job(name="backup_folder_restore", cleanup=False) | ||||||
|  |     async def _folder_restore(self, name: str) -> None: | ||||||
|  |         """Restore a folder.""" | ||||||
|  |         self.sys_jobs.current.reference = name | ||||||
|  |  | ||||||
|  |         slug_name = name.replace("/", "_") | ||||||
|  |         tar_name = Path( | ||||||
|  |             self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}" | ||||||
|  |         ) | ||||||
|  |         origin_dir = Path(self.sys_config.path_supervisor, name) | ||||||
|  |  | ||||||
|  |         # Check if exists inside backup | ||||||
|  |         if not tar_name.exists(): | ||||||
|  |             raise BackupInvalidError( | ||||||
|  |                 f"Can't find restore folder {name}", _LOGGER.warning | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         # Unmount any mounts within folder | ||||||
|  |         bind_mounts = [ | ||||||
|  |             bound.bind_mount | ||||||
|  |             for bound in self.sys_mounts.bound_mounts | ||||||
|  |             if bound.bind_mount.local_where | ||||||
|  |             and bound.bind_mount.local_where.is_relative_to(origin_dir) | ||||||
|  |         ] | ||||||
|  |         if bind_mounts: | ||||||
|  |             await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts]) | ||||||
|  |  | ||||||
|  |         # Clean old stuff | ||||||
|  |         if origin_dir.is_dir(): | ||||||
|  |             await remove_folder(origin_dir, content_only=True) | ||||||
|  |  | ||||||
|  |         # Perform a restore | ||||||
|  |         def _restore() -> bool: | ||||||
|             try: |             try: | ||||||
|                 await _folder_save(folder) |                 _LOGGER.info("Restore folder %s", name) | ||||||
|  |                 with SecureTarFile( | ||||||
|  |                     tar_name, | ||||||
|  |                     "r", | ||||||
|  |                     key=self._key, | ||||||
|  |                     gzip=self.compressed, | ||||||
|  |                     bufsize=BUF_SIZE, | ||||||
|  |                 ) as tar_file: | ||||||
|  |                     tar_file.extractall( | ||||||
|  |                         path=origin_dir, members=tar_file, filter="fully_trusted" | ||||||
|  |                     ) | ||||||
|  |                 _LOGGER.info("Restore folder %s done", name) | ||||||
|             except (tarfile.TarError, OSError) as err: |             except (tarfile.TarError, OSError) as err: | ||||||
|                 raise BackupError( |                 raise BackupError( | ||||||
|                     f"Can't backup folder {folder}: {str(err)}", _LOGGER.error |                     f"Can't restore folder {name}: {err}", _LOGGER.warning | ||||||
|                 ) from err |                 ) from err | ||||||
|  |             return True | ||||||
|  |  | ||||||
|     async def restore_folders(self, folder_list: list[str]): |         try: | ||||||
|  |             return await self.sys_run_in_executor(_restore) | ||||||
|  |         finally: | ||||||
|  |             if bind_mounts: | ||||||
|  |                 await asyncio.gather( | ||||||
|  |                     *[bind_mount.mount() for bind_mount in bind_mounts] | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |     @Job(name="backup_restore_folders", cleanup=False) | ||||||
|  |     async def restore_folders(self, folder_list: list[str]) -> bool: | ||||||
|         """Backup Supervisor data into backup.""" |         """Backup Supervisor data into backup.""" | ||||||
|  |         success = True | ||||||
|  |  | ||||||
|         async def _folder_restore(name: str) -> None: |         async def _folder_restore(name: str) -> bool: | ||||||
|             """Intenal function to restore a folder.""" |             """Intenal function to restore a folder.""" | ||||||
|             slug_name = name.replace("/", "_") |             slug_name = name.replace("/", "_") | ||||||
|             tar_name = Path( |             tar_name = Path( | ||||||
@@ -497,7 +644,7 @@ class Backup(CoreSysAttributes):
             # Check if exists inside backup
             if not tar_name.exists():
                 _LOGGER.warning("Can't find restore folder %s", name)
-                return
+                return False
 
             # Unmount any mounts within folder
             bind_mounts = [
@@ -516,7 +663,7 @@ class Backup(CoreSysAttributes):
                 await remove_folder(origin_dir, content_only=True)
 
             # Perform a restore
-            def _restore() -> None:
+            def _restore() -> bool:
                 try:
                     _LOGGER.info("Restore folder %s", name)
                     with SecureTarFile(
@@ -526,27 +673,33 @@ class Backup(CoreSysAttributes):
                         gzip=self.compressed,
                         bufsize=BUF_SIZE,
                     ) as tar_file:
-                        tar_file.extractall(path=origin_dir, members=tar_file)
+                        tar_file.extractall(
+                            path=origin_dir, members=tar_file, filter="fully_trusted"
+                        )
                     _LOGGER.info("Restore folder %s done", name)
                 except (tarfile.TarError, OSError) as err:
                     _LOGGER.warning("Can't restore folder %s: %s", name, err)
+                    return False
+                return True
 
             try:
-                await self.sys_run_in_executor(_restore)
+                return await self.sys_run_in_executor(_restore)
             finally:
                 if bind_mounts:
                     await asyncio.gather(
                         *[bind_mount.mount() for bind_mount in bind_mounts]
                     )
 
-        # Restore folder sequential
-        # avoid issue on slow IO
+        # Restore folders sequentially to avoid issues on slow IO
         for folder in folder_list:
             try:
-                await _folder_restore(folder)
+                await self._folder_restore(folder)
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.warning("Can't restore folder %s: %s", folder, err)
+                success = False
+        return success
 
+    @Job(name="backup_store_homeassistant", cleanup=False)
     async def store_homeassistant(self, exclude_database: bool = False):
         """Backup Home Assistant Core configuration folder."""
         self._data[ATTR_HOMEASSISTANT] = {
@@ -554,12 +707,12 @@ class Backup(CoreSysAttributes):
             ATTR_EXCLUDE_DATABASE: exclude_database,
         }
 
+        tar_name = f"homeassistant.tar{'.gz' if self.compressed else ''}"
         # Backup Home Assistant Core config directory
-        tar_name = Path(
-            self._tmp.name, f"homeassistant.tar{'.gz' if self.compressed else ''}"
-        )
-        homeassistant_file = SecureTarFile(
-            tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
+        homeassistant_file = self._outer_secure_tarfile.create_inner_tar(
+            f"./{tar_name}",
+            gzip=self.compressed,
+            key=self._key,
         )
 
         await self.sys_homeassistant.backup(homeassistant_file, exclude_database)
@@ -567,6 +720,7 @@ class Backup(CoreSysAttributes):
         # Store size
         self.homeassistant[ATTR_SIZE] = homeassistant_file.size
 
+    @Job(name="backup_restore_homeassistant", cleanup=False)
     async def restore_homeassistant(self) -> Awaitable[None]:
         """Restore Home Assistant Core configuration folder."""
         await self.sys_homeassistant.core.stop()
@@ -600,16 +754,16 @@ class Backup(CoreSysAttributes):
 
         return self.sys_create_task(_core_update())
 
-    def store_repositories(self):
+    def store_repositories(self) -> None:
         """Store repository list into backup."""
         self.repositories = self.sys_store.repository_urls
 
-    async def restore_repositories(self, replace: bool = False):
+    def restore_repositories(self, replace: bool = False) -> Awaitable[None]:
         """Restore repositories from backup.
 
         Return a coroutine.
         """
-        await self.sys_store.update_repositories(
+        return self.sys_store.update_repositories(
             self.repositories, add_with_errors=True, replace=replace
         )
 
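Note on restore_repositories: it changes from an async def that awaits to a plain def handing back the un-awaited coroutine, matching its "Return a coroutine." docstring; the caller decides when to await it or wrap it in a task. The pattern in isolation, with hypothetical names:

    import asyncio
    from collections.abc import Awaitable

    async def _update(urls: list[str]) -> None:
        await asyncio.sleep(0)              # stand-in for store I/O

    def restore_repositories(urls: list[str]) -> Awaitable[None]:
        # No await here: the coroutine starts only once it is awaited
        return _update(urls)

    asyncio.run(restore_repositories(["https://example.com/repo"]))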
--- a/supervisor/backups/manager.py
+++ b/supervisor/backups/manager.py
@@ -3,6 +3,7 @@ from __future__ import annotations
 
 import asyncio
 from collections.abc import Awaitable, Iterable
+import errno
 import logging
 from pathlib import Path
 
@@ -14,11 +15,17 @@ from ..const import (
     CoreState,
 )
 from ..dbus.const import UnitActiveState
-from ..exceptions import AddonsError, BackupError, BackupJobError
+from ..exceptions import (
+    BackupError,
+    BackupInvalidError,
+    BackupJobError,
+    BackupMountDownError,
+)
 from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit
 from ..jobs.decorator import Job
 from ..jobs.job_group import JobGroup
 from ..mounts.mount import Mount
+from ..resolution.const import UnhealthyReason
 from ..utils.common import FileConfiguration
 from ..utils.dt import utcnow
 from ..utils.sentinel import DEFAULT
@@ -31,18 +38,6 @@ from .validate import ALL_FOLDERS, SCHEMA_BACKUPS_CONFIG
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
-def _list_backup_files(path: Path) -> Iterable[Path]:
-    """Return iterable of backup files, suppress and log OSError for network mounts."""
-    try:
-        # is_dir does a stat syscall which raises if the mount is down
-        if path.is_dir():
-            return path.glob("*.tar")
-    except OSError as err:
-        _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err)
-
-    return []
-
-
 class BackupManager(FileConfiguration, JobGroup):
     """Manage backups."""
 
@@ -84,11 +79,15 @@ class BackupManager(FileConfiguration, JobGroup):
 
     def _get_base_path(self, location: Mount | type[DEFAULT] | None = DEFAULT) -> Path:
         """Get base path for backup using location or default location."""
-        if location:
-            return location.local_where
-
         if location == DEFAULT and self.sys_mounts.default_backup_mount:
-            return self.sys_mounts.default_backup_mount.local_where
+            location = self.sys_mounts.default_backup_mount
+
+        if location:
+            if not location.local_where.is_mount():
+                raise BackupMountDownError(
+                    f"{location.name} is down, cannot back-up to it", _LOGGER.error
+                )
+            return location.local_where
 
         return self.sys_config.path_backup
 
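Note on _get_base_path: the DEFAULT sentinel is first normalized to the default backup mount, then any explicit or defaulted mount is verified with is_mount() so a detached network share raises BackupMountDownError up front instead of silently writing into the empty mount directory. The stdlib check in isolation (names hypothetical):

    from pathlib import Path

    def ensure_mounted(where: Path) -> Path:
        # is_mount() stats the directory and its parent; a share that is
        # down looks like a plain (usually empty) local directory
        if not where.is_mount():
            raise RuntimeError(f"{where} is down, cannot back-up to it")
        return where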
@@ -119,6 +118,19 @@ class BackupManager(FileConfiguration, JobGroup):
         )
         self.sys_jobs.current.stage = stage
 
+    def _list_backup_files(self, path: Path) -> Iterable[Path]:
+        """Return iterable of backup files, suppress and log OSError for network mounts."""
+        try:
+            # is_dir does a stat syscall which raises if the mount is down
+            if path.is_dir():
+                return path.glob("*.tar")
+        except OSError as err:
+            if err.errno == errno.EBADMSG and path == self.sys_config.path_backup:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
+            _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err)
+
+        return []
+
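Note on the EBADMSG handling: turning _list_backup_files into a method gives it access to sys_resolution, so an EBADMSG coming from the local backup directory (typically SD-card corruption) marks the whole system unhealthy, while errors from remote mounts are only logged. A reduced sketch, with prints standing in for Supervisor's resolution and logger:

    import errno
    from pathlib import Path

    def list_tars(path: Path, local_dir: Path) -> list[Path]:
        try:
            if path.is_dir():
                return list(path.glob("*.tar"))
        except OSError as err:
            if err.errno == errno.EBADMSG and path == local_dir:
                print("mark system unhealthy: bad message from storage")
            print(f"Could not list backups from {path}: {err}")
        return []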
     def _create_backup(
         self,
         name: str,
@@ -136,8 +148,8 @@ class BackupManager(FileConfiguration, JobGroup):
         tar_file = Path(self._get_base_path(location), f"{slug}.tar")
 
         # init object
-        backup = Backup(self.coresys, tar_file)
-        backup.new(slug, name, date_str, sys_type, password, compressed)
+        backup = Backup(self.coresys, tar_file, slug)
+        backup.new(name, date_str, sys_type, password, compressed)
 
         # Add backup ID to job
         self.sys_jobs.current.reference = backup.slug
@@ -162,14 +174,16 @@ class BackupManager(FileConfiguration, JobGroup):
 
         async def _load_backup(tar_file):
             """Load the backup."""
-            backup = Backup(self.coresys, tar_file)
+            backup = Backup(self.coresys, tar_file, "temp")
             if await backup.load():
-                self._backups[backup.slug] = backup
+                self._backups[backup.slug] = Backup(
+                    self.coresys, tar_file, backup.slug, backup.data
+                )
 
         tasks = [
             self.sys_create_task(_load_backup(tar_file))
             for path in self.backup_locations
-            for tar_file in _list_backup_files(path)
+            for tar_file in self._list_backup_files(path)
         ]
 
         _LOGGER.info("Found %d backup files", len(tasks))
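Note on the "temp" slug: the slug is now a constructor argument, but for an on-disk file it is only known after backup.json has been read, so loading probes with a throwaway slug and then rebuilds the Backup keyed by the real one, using the data copy from the property added above. The two-step pattern in miniature (class hypothetical):

    class Record:
        def __init__(self, key: str, data: dict | None = None) -> None:
            self.data = data or {"slug": key}

        def load(self) -> None:
            self.data = {"slug": "abc123"}   # stand-in for reading backup.json

    probe = Record("temp")
    probe.load()
    final = Record(probe.data["slug"], probe.data)  # re-keyed under the real slug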
@@ -184,6 +198,11 @@ class BackupManager(FileConfiguration, JobGroup):
             _LOGGER.info("Removed backup file %s", backup.slug)
 
         except OSError as err:
+            if (
+                err.errno == errno.EBADMSG
+                and backup.tarfile.parent == self.sys_config.path_backup
+            ):
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error("Can't remove backup %s: %s", backup.slug, err)
             return False
 
@@ -191,7 +210,7 @@ class BackupManager(FileConfiguration, JobGroup):
 
     async def import_backup(self, tar_file: Path) -> Backup | None:
         """Check backup tarfile and import it."""
-        backup = Backup(self.coresys, tar_file)
+        backup = Backup(self.coresys, tar_file, "temp")
 
         # Read meta data
         if not await backup.load():
@@ -208,11 +227,13 @@ class BackupManager(FileConfiguration, JobGroup):
             backup.tarfile.rename(tar_origin)
 
         except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error("Can't move backup file to storage: %s", err)
             return None
 
         # Load new backup
-        backup = Backup(self.coresys, tar_origin)
+        backup = Backup(self.coresys, tar_origin, backup.slug, backup.data)
         if not await backup.load():
             return None
         _LOGGER.info("Successfully imported %s", backup.slug)
@@ -259,9 +280,15 @@ class BackupManager(FileConfiguration, JobGroup):
 
                 self._change_stage(BackupJobStage.FINISHING_FILE, backup)
 
+        except BackupError as err:
+            self.sys_jobs.current.capture_error(err)
+            return None
         except Exception as err:  # pylint: disable=broad-except
             _LOGGER.exception("Backup %s error", backup.slug)
             capture_exception(err)
+            self.sys_jobs.current.capture_error(
+                BackupError(f"Backup {backup.slug} error, see supervisor logs")
+            )
             return None
         else:
             self._backups[backup.slug] = backup
@@ -280,6 +307,7 @@ class BackupManager(FileConfiguration, JobGroup):
         conditions=[JobCondition.RUNNING],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_backup_full(
         self,
@@ -316,6 +344,7 @@ class BackupManager(FileConfiguration, JobGroup):
         conditions=[JobCondition.RUNNING],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_backup_partial(
         self,
@@ -378,6 +407,7 @@ class BackupManager(FileConfiguration, JobGroup):
         Must be called from an existing restore job.
         """
         addon_start_tasks: list[Awaitable[None]] | None = None
+        success = True
 
         try:
             task_hass: asyncio.Task | None = None
@@ -389,7 +419,7 @@ class BackupManager(FileConfiguration, JobGroup):
                 # Process folders
                 if folder_list:
                     self._change_stage(RestoreJobStage.FOLDERS, backup)
-                    await backup.restore_folders(folder_list)
+                    success = await backup.restore_folders(folder_list)
 
                 # Process Home-Assistant
                 if homeassistant:
@@ -399,23 +429,17 @@ class BackupManager(FileConfiguration, JobGroup):
                 # Delete delta add-ons
                 if replace:
                     self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup)
-                    for addon in self.sys_addons.installed:
-                        if addon.slug in backup.addon_list:
-                            continue
-
-                        # Remove Add-on because it's not a part of the new env
-                        # Do it sequential avoid issue on slow IO
-                        try:
-                            await self.sys_addons.uninstall(addon.slug)
-                        except AddonsError:
-                            _LOGGER.warning("Can't uninstall Add-on %s", addon.slug)
+                    success = success and await backup.remove_delta_addons()
 
                 if addon_list:
                     self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup)
                     await backup.restore_repositories(replace)
 
                     self._change_stage(RestoreJobStage.ADDONS, backup)
-                    addon_start_tasks = await backup.restore_addons(addon_list)
+                    restore_success, addon_start_tasks = await backup.restore_addons(
+                        addon_list
+                    )
+                    success = success and restore_success
 
                 # Wait for Home Assistant Core update/downgrade
                 if task_hass:
@@ -423,18 +447,24 @@ class BackupManager(FileConfiguration, JobGroup):
                         RestoreJobStage.AWAIT_HOME_ASSISTANT_RESTART, backup
                     )
                     await task_hass
+        except BackupError:
+            raise
         except Exception as err:  # pylint: disable=broad-except
             _LOGGER.exception("Restore %s error", backup.slug)
             capture_exception(err)
-            return False
+            raise BackupError(
+                f"Restore {backup.slug} error, see supervisor logs"
+            ) from err
         else:
             if addon_start_tasks:
                 self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup)
-                # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
-                await asyncio.gather(*addon_start_tasks, return_exceptions=True)
+                # Failure to resume addons post restore is still a restore failure
+                if any(
+                    await asyncio.gather(*addon_start_tasks, return_exceptions=True)
+                ):
+                    return False
 
-            return True
+            return success
         finally:
             # Leave Home Assistant alone if it wasn't part of the restore
             if homeassistant:
@@ -442,12 +472,16 @@ class BackupManager(FileConfiguration, JobGroup):
 
                 # Do we need to start Home Assistant Core?
                 if not await self.sys_homeassistant.core.is_running():
-                    await self.sys_homeassistant.core.start()
+                    await self.sys_homeassistant.core.start(
+                        _job_override__cleanup=False
+                    )
 
                 # Check if we can access the API, otherwise restart
                 if not await self.sys_homeassistant.api.check_api_state():
                     _LOGGER.warning("Need restart HomeAssistant for API")
-                    await self.sys_homeassistant.core.restart()
+                    await self.sys_homeassistant.core.restart(
+                        _job_override__cleanup=False
+                    )
 
     @Job(
         name="backup_manager_full_restore",
@@ -460,6 +494,7 @@ class BackupManager(FileConfiguration, JobGroup):
         ],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_restore_full(
         self, backup: Backup, password: str | None = None
@@ -469,32 +504,34 @@ class BackupManager(FileConfiguration, JobGroup):
 
         self.sys_jobs.current.reference = backup.slug
 
         if backup.sys_type != BackupType.FULL:
-            _LOGGER.error("%s is only a partial backup!", backup.slug)
-            return False
+            raise BackupInvalidError(
+                f"{backup.slug} is only a partial backup!", _LOGGER.error
+            )
 
         if backup.protected and not backup.set_password(password):
-            _LOGGER.error("Invalid password for backup %s", backup.slug)
-            return False
+            raise BackupInvalidError(
+                f"Invalid password for backup {backup.slug}", _LOGGER.error
+            )
 
         if backup.supervisor_version > self.sys_supervisor.version:
-            _LOGGER.error(
-                "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.",
-                backup.supervisor_version,
-                self.sys_supervisor.version,
+            raise BackupInvalidError(
+                f"Backup was made on supervisor version {backup.supervisor_version}, "
+                f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.",
+                _LOGGER.error,
             )
-            return False
 
         _LOGGER.info("Full-Restore %s start", backup.slug)
         self.sys_core.state = CoreState.FREEZE
 
-        # Stop Home-Assistant / Add-ons
-        await self.sys_core.shutdown()
+        try:
+            # Stop Home-Assistant / Add-ons
+            await self.sys_core.shutdown()
 
-        success = await self._do_restore(
-            backup, backup.addon_list, backup.folders, True, True
-        )
-
-        self.sys_core.state = CoreState.RUNNING
+            success = await self._do_restore(
+                backup, backup.addon_list, backup.folders, True, True
+            )
+        finally:
+            self.sys_core.state = CoreState.RUNNING
 
         if success:
             _LOGGER.info("Full-Restore %s done", backup.slug)
| @@ -511,6 +548,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         ], |         ], | ||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|         on_condition=BackupJobError, |         on_condition=BackupJobError, | ||||||
|  |         cleanup=False, | ||||||
|     ) |     ) | ||||||
|     async def do_restore_partial( |     async def do_restore_partial( | ||||||
|         self, |         self, | ||||||
| @@ -533,29 +571,31 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|             homeassistant = True |             homeassistant = True | ||||||
|  |  | ||||||
|         if backup.protected and not backup.set_password(password): |         if backup.protected and not backup.set_password(password): | ||||||
|             _LOGGER.error("Invalid password for backup %s", backup.slug) |             raise BackupInvalidError( | ||||||
|             return False |                 f"Invalid password for backup {backup.slug}", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         if backup.homeassistant is None and homeassistant: |         if backup.homeassistant is None and homeassistant: | ||||||
|             _LOGGER.error("No Home Assistant Core data inside the backup") |             raise BackupInvalidError( | ||||||
|             return False |                 "No Home Assistant Core data inside the backup", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         if backup.supervisor_version > self.sys_supervisor.version: |         if backup.supervisor_version > self.sys_supervisor.version: | ||||||
|             _LOGGER.error( |             raise BackupInvalidError( | ||||||
|                 "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.", |                 f"Backup was made on supervisor version {backup.supervisor_version}, " | ||||||
|                 backup.supervisor_version, |                 f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.", | ||||||
|                 self.sys_supervisor.version, |                 _LOGGER.error, | ||||||
|             ) |             ) | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Partial-Restore %s start", backup.slug) |         _LOGGER.info("Partial-Restore %s start", backup.slug) | ||||||
|         self.sys_core.state = CoreState.FREEZE |         self.sys_core.state = CoreState.FREEZE | ||||||
|  |  | ||||||
|         success = await self._do_restore( |         try: | ||||||
|             backup, addon_list, folder_list, homeassistant, False |             success = await self._do_restore( | ||||||
|         ) |                 backup, addon_list, folder_list, homeassistant, False | ||||||
|  |             ) | ||||||
|         self.sys_core.state = CoreState.RUNNING |         finally: | ||||||
|  |             self.sys_core.state = CoreState.RUNNING | ||||||
|  |  | ||||||
|         if success: |         if success: | ||||||
|             _LOGGER.info("Partial-Restore %s done", backup.slug) |             _LOGGER.info("Partial-Restore %s done", backup.slug) | ||||||
|   | |||||||
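Note on the hunks above: the restore paths stop returning False and instead raise BackupInvalidError(message, _LOGGER.error), while a try/finally guarantees the core leaves the FREEZE state even when the restore fails. A minimal, self-contained sketch of that exception-plus-logger idiom; the class below is a stand-in for illustration, not the Supervisor implementation:

import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)


class BackupInvalidError(Exception):
    """Sketch: an error that logs itself through a passed-in logger method."""

    def __init__(self, message: str, logger=None) -> None:
        if logger is not None:
            logger(message)  # e.g. _LOGGER.error, mirroring the diff above
        super().__init__(message)


def do_restore(valid: bool) -> bool:
    state = "FREEZE"
    try:
        if not valid:
            # Raising replaces 'return False'; a job layer can turn this
            # into a failed job carrying a proper error message.
            raise BackupInvalidError("backup is invalid", _LOGGER.error)
        return True
    finally:
        # As in the diff: state is restored even when the restore raises.
        state = "RUNNING"
        _LOGGER.info("core state back to %s", state)


do_restore(True)
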
@@ -53,7 +53,7 @@ def unique_addons(addons_list):
 
 
 def v1_homeassistant(
-    homeassistant_data: dict[str, Any] | None
+    homeassistant_data: dict[str, Any] | None,
 ) -> dict[str, Any] | None:
     """Cleanup homeassistant artefacts from v1."""
     if not homeassistant_data:

@@ -6,7 +6,7 @@ import signal
 
 from colorlog import ColoredFormatter
 
-from .addons import AddonManager
+from .addons.manager import AddonManager
 from .api import RestAPI
 from .arch import CpuArch
 from .auth import Auth
@@ -115,7 +115,7 @@ async def initialize_coresys() -> CoreSys:
         _LOGGER.warning(
             "Missing SUPERVISOR_MACHINE environment variable. Fallback to deprecated extraction!"
         )
-    _LOGGER.info("Seting up coresys for machine: %s", coresys.machine)
+    _LOGGER.info("Setting up coresys for machine: %s", coresys.machine)
 
     return coresys
 
@@ -256,9 +256,11 @@ def migrate_system_env(coresys: CoreSys) -> None:
 def initialize_logging() -> None:
     """Initialize the logging."""
     logging.basicConfig(level=logging.INFO)
-    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    fmt = (
+        "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    )
     colorfmt = f"%(log_color)s{fmt}%(reset)s"
-    datefmt = "%y-%m-%d %H:%M:%S"
+    datefmt = "%Y-%m-%d %H:%M:%S"
 
     # suppress overly verbose logs from libraries that aren't helpful
     logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

@@ -1,5 +1,5 @@
 """Bootstrap Supervisor."""
-from datetime import datetime
+from datetime import UTC, datetime
 import logging
 import os
 from pathlib import Path, PurePath
@@ -50,7 +50,7 @@ MOUNTS_CREDENTIALS = PurePath(".mounts_credentials")
 EMERGENCY_DATA = PurePath("emergency")
 ADDON_CONFIGS = PurePath("addon_configs")
 
-DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
+DEFAULT_BOOT_TIME = datetime.fromtimestamp(0, UTC).isoformat()
 
 # We filter out UTC because it's the system default fallback
 # Core also does not respect the container timezone and resets timezones
@@ -164,7 +164,7 @@ class CoreConfig(FileConfiguration):
 
         boot_time = parse_datetime(boot_str)
         if not boot_time:
-            return datetime.utcfromtimestamp(1)
+            return datetime.fromtimestamp(1, UTC)
         return boot_time
 
     @last_boot.setter

@@ -68,6 +68,7 @@ META_SUPERVISOR = "supervisor"
 JSON_DATA = "data"
 JSON_MESSAGE = "message"
 JSON_RESULT = "result"
+JSON_JOB_ID = "job_id"
 
 RESULT_ERROR = "error"
 RESULT_OK = "ok"
@@ -331,6 +332,7 @@ ATTR_UUID = "uuid"
 ATTR_VALID = "valid"
 ATTR_VALUE = "value"
 ATTR_VERSION = "version"
+ATTR_VERSION_TIMESTAMP = "version_timestamp"
 ATTR_VERSION_LATEST = "version_latest"
 ATTR_VIDEO = "video"
 ATTR_VLAN = "vlan"
@@ -345,17 +347,6 @@ PROVIDE_SERVICE = "provide"
 NEED_SERVICE = "need"
 WANT_SERVICE = "want"
 
-
-MAP_CONFIG = "config"
-MAP_SSL = "ssl"
-MAP_ADDONS = "addons"
-MAP_BACKUP = "backup"
-MAP_SHARE = "share"
-MAP_MEDIA = "media"
-MAP_HOMEASSISTANT_CONFIG = "homeassistant_config"
-MAP_ALL_ADDON_CONFIGS = "all_addon_configs"
-MAP_ADDON_CONFIG = "addon_config"
-
 ARCH_ARMHF = "armhf"
 ARCH_ARMV7 = "armv7"
 ARCH_AARCH64 = "aarch64"
@@ -469,9 +460,11 @@ class HostFeature(StrEnum):
 class BusEvent(StrEnum):
     """Bus event type."""
 
+    DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
     HARDWARE_NEW_DEVICE = "hardware_new_device"
     HARDWARE_REMOVE_DEVICE = "hardware_remove_device"
-    DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
+    SUPERVISOR_JOB_END = "supervisor_job_end"
+    SUPERVISOR_JOB_START = "supervisor_job_start"
     SUPERVISOR_STATE_CHANGE = "supervisor_state_change"

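BusEvent gains SUPERVISOR_JOB_START and SUPERVISOR_JOB_END, alongside the new JSON_JOB_ID API constant. A hedged sketch of listening for these events, assuming the internal bus exposes register_event(event, coroutine_callback) the way existing DOCKER_CONTAINER_STATE_CHANGE listeners are wired; the string payload shape is also an assumption:

import logging

from supervisor.const import BusEvent

_LOGGER = logging.getLogger(__name__)


async def on_job_start(job_id: str) -> None:
    # Assumed payload: the identifier of the job that just started.
    _LOGGER.info("Supervisor job %s started", job_id)


def setup(coresys) -> None:
    # Assumes Bus.register_event(event, coroutine_callback); verify against
    # supervisor/bus.py before relying on this exact signature.
    coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_START, on_job_start)
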
@@ -5,8 +5,6 @@ from contextlib import suppress
 from datetime import timedelta
 import logging
 
-import async_timeout
-
 from .const import (
     ATTR_STARTUP,
     RUN_SUPERVISOR_STATE,
@@ -28,7 +26,7 @@ from .homeassistant.core import LANDINGPAGE
 from .resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason
 from .utils.dt import utcnow
 from .utils.sentry import capture_exception
-from .utils.whoami import retrieve_whoami
+from .utils.whoami import WhoamiData, retrieve_whoami
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -179,7 +177,15 @@ class Core(CoreSysAttributes):
             and not self.sys_dev
             and self.supported
         ):
-            self.sys_dbus.agent.diagnostics = self.sys_config.diagnostics
+            try:
+                await self.sys_dbus.agent.set_diagnostics(self.sys_config.diagnostics)
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning(
+                    "Could not set diagnostics to %s due to %s",
+                    self.sys_config.diagnostics,
+                    err,
+                )
+                capture_exception(err)
 
         # Evaluate the system
         await self.sys_resolution.evaluate.evaluate_system()
@@ -298,7 +304,7 @@ class Core(CoreSysAttributes):
 
         # Stage 1
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -314,7 +320,7 @@ class Core(CoreSysAttributes):
 
         # Stage 2
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -339,9 +345,6 @@ class Core(CoreSysAttributes):
         if self.state == CoreState.RUNNING:
             self.state = CoreState.SHUTDOWN
 
-        # Stop docker monitoring
-        await self.sys_docker.unload()
-
         # Shutdown Application Add-ons, using Home Assistant API
         await self.sys_addons.shutdown(AddonStartup.APPLICATION)
 
@@ -363,6 +366,13 @@ class Core(CoreSysAttributes):
         self.sys_config.last_boot = self.sys_hardware.helper.last_boot
         self.sys_config.save_data()
 
+    async def _retrieve_whoami(self, with_ssl: bool) -> WhoamiData | None:
+        try:
+            return await retrieve_whoami(self.sys_websession, with_ssl)
+        except WhoamiSSLError:
+            _LOGGER.info("Whoami service SSL error")
+            return None
+
     async def _adjust_system_datetime(self):
         """Adjust system time/date on startup."""
         # If no timezone is detected or set
@@ -375,21 +385,15 @@ class Core(CoreSysAttributes):
 
         # Get Timezone data
         try:
-            data = await retrieve_whoami(self.sys_websession)
-        except WhoamiSSLError:
-            pass
+            data = await self._retrieve_whoami(True)
+
+            # SSL Date Issue & possible time drift
+            if not data:
+                data = await self._retrieve_whoami(False)
         except WhoamiError as err:
             _LOGGER.warning("Can't adjust Time/Date settings: %s", err)
             return
 
-        # SSL Date Issue & possible time drift
-        if not data:
-            try:
-                data = await retrieve_whoami(self.sys_websession, with_ssl=False)
-            except WhoamiError as err:
-                _LOGGER.error("Can't adjust Time/Date settings: %s", err)
-                return
-
         self.sys_config.timezone = self.sys_config.timezone or data.timezone
 
         # Calculate if system time is out of sync

@@ -18,7 +18,7 @@ from .const import ENV_SUPERVISOR_DEV, SERVER_SOFTWARE
 from .utils.dt import UTC, get_time_zone
 
 if TYPE_CHECKING:
-    from .addons import AddonManager
+    from .addons.manager import AddonManager
     from .api import RestAPI
     from .arch import CpuArch
     from .auth import Auth
@@ -544,13 +544,44 @@ class CoreSys:
 
         return self.loop.run_in_executor(None, funct, *args)
 
-    def create_task(self, coroutine: Coroutine) -> asyncio.Task:
-        """Create an async task."""
+    def _create_context(self) -> Context:
+        """Create a new context for a task."""
         context = copy_context()
         for callback in self._set_task_context:
             context = callback(context)
+        return context
 
-        return self.loop.create_task(coroutine, context=context)
+    def create_task(self, coroutine: Coroutine) -> asyncio.Task:
+        """Create an async task."""
+        return self.loop.create_task(coroutine, context=self._create_context())
+
+    def call_later(
+        self,
+        delay: float,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task after a delay."""
+        if kwargs:
+            funct = partial(funct, **kwargs)
+
+        return self.loop.call_later(delay, funct, *args, context=self._create_context())
+
+    def call_at(
+        self,
+        when: datetime,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task at the specified datetime."""
+        if kwargs:
+            funct = partial(funct, **kwargs)
+
+        return self.loop.call_at(
+            when.timestamp(), funct, *args, context=self._create_context()
+        )
 
 
 class CoreSysAttributes:
@@ -731,3 +762,23 @@ class CoreSysAttributes:
     def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task:
         """Create an async task."""
         return self.coresys.create_task(coroutine)
+
+    def sys_call_later(
+        self,
+        delay: float,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task after a delay."""
+        return self.coresys.call_later(delay, funct, *args, **kwargs)
+
+    def sys_call_at(
+        self,
+        when: datetime,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task at the specified datetime."""
+        return self.coresys.call_at(when, funct, *args, **kwargs)

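The new call_later and call_at helpers reuse the same copy_context() plumbing as create_task, so timer callbacks observe the scheduler's context variables. A standalone asyncio sketch of why passing context= to loop.call_later matters; no Supervisor types are involved:

import asyncio
import contextvars

request_id = contextvars.ContextVar("request_id", default="unset")


def log_tick() -> None:
    # Runs inside the context handed to call_later, so the value set by
    # the scheduler is visible even though this is a plain callback.
    print(f"tick, request_id={request_id.get()}")


async def main() -> None:
    loop = asyncio.get_running_loop()
    request_id.set("abc123")

    # Like CoreSys._create_context(): capture (and potentially adjust) a
    # context once, then hand it to the timer explicitly.
    ctx = contextvars.copy_context()
    loop.call_later(0.1, log_tick, context=ctx)

    await asyncio.sleep(0.2)


asyncio.run(main())
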
@@ -5,6 +5,7 @@
   "raspberrypi3-64": ["aarch64", "armv7", "armhf"],
   "raspberrypi4": ["armv7", "armhf"],
   "raspberrypi4-64": ["aarch64", "armv7", "armhf"],
+  "raspberrypi5-64": ["aarch64", "armv7", "armhf"],
   "yellow": ["aarch64", "armv7", "armhf"],
   "green": ["aarch64", "armv7", "armhf"],
   "tinker": ["armv7", "armhf"],

@@ -1,12 +1,13 @@
 """OS-Agent implementation for DBUS."""
 import asyncio
+from collections.abc import Awaitable
 import logging
 from typing import Any
 
 from awesomeversion import AwesomeVersion
 from dbus_fast.aio.message_bus import MessageBus
 
-from ...exceptions import DBusError, DBusInterfaceError
+from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..const import (
     DBUS_ATTR_DIAGNOSTICS,
     DBUS_ATTR_VERSION,
@@ -80,11 +81,9 @@ class OSAgent(DBusInterfaceProxy):
         """Return if diagnostics is enabled on OS-Agent."""
         return self.properties[DBUS_ATTR_DIAGNOSTICS]
 
-    @diagnostics.setter
-    @dbus_property
-    def diagnostics(self, value: bool) -> None:
+    def set_diagnostics(self, value: bool) -> Awaitable[None]:
         """Enable or disable OS-Agent diagnostics."""
-        asyncio.create_task(self.dbus.set_diagnostics(value))
+        return self.dbus.set_diagnostics(value)
 
     @property
     def all(self) -> list[DBusInterface]:
@@ -99,7 +98,7 @@ class OSAgent(DBusInterfaceProxy):
             await asyncio.gather(*[dbus.connect(bus) for dbus in self.all])
         except DBusError:
             _LOGGER.warning("Can't connect to OS-Agent")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No OS-Agent support on the host. Some Host functions have been disabled."
             )

@@ -1,6 +1,7 @@
 """Green board management."""
 
 import asyncio
+from collections.abc import Awaitable
 
 from dbus_fast.aio.message_bus import MessageBus
 
@@ -25,11 +26,10 @@ class Green(BoardProxy):
         """Get activity LED enabled."""
         return self.properties[DBUS_ATTR_ACTIVITY_LED]
 
-    @activity_led.setter
-    def activity_led(self, enabled: bool) -> None:
+    def set_activity_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable activity LED."""
         self._data[ATTR_ACTIVITY_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_activity_led(enabled))
+        return self.dbus.Boards.Green.set_activity_led(enabled)
 
     @property
     @dbus_property
@@ -37,11 +37,10 @@ class Green(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]
 
-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
         self._data[ATTR_POWER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_power_led(enabled))
+        return self.dbus.Boards.Green.set_power_led(enabled)
 
     @property
     @dbus_property
@@ -49,17 +48,18 @@ class Green(BoardProxy):
         """Get user LED enabled."""
         return self.properties[DBUS_ATTR_USER_LED]
 
-    @user_led.setter
-    def user_led(self, enabled: bool) -> None:
+    def set_user_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable user LED."""
         self._data[ATTR_USER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_user_led(enabled))
+        return self.dbus.Boards.Green.set_user_led(enabled)
 
     async def connect(self, bus: MessageBus) -> None:
         """Connect to D-Bus."""
         await super().connect(bus)
 
         # Set LEDs based on settings on connect
-        self.activity_led = self._data[ATTR_ACTIVITY_LED]
-        self.power_led = self._data[ATTR_POWER_LED]
-        self.user_led = self._data[ATTR_USER_LED]
+        await asyncio.gather(
+            self.set_activity_led(self._data[ATTR_ACTIVITY_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+            self.set_user_led(self._data[ATTR_USER_LED]),
+        )

@@ -1,6 +1,7 @@
 """Yellow board management."""
 
 import asyncio
+from collections.abc import Awaitable
 
 from dbus_fast.aio.message_bus import MessageBus
 
@@ -25,11 +26,10 @@ class Yellow(BoardProxy):
         """Get heartbeat LED enabled."""
         return self.properties[DBUS_ATTR_HEARTBEAT_LED]
 
-    @heartbeat_led.setter
-    def heartbeat_led(self, enabled: bool) -> None:
+    def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable heartbeat LED."""
         self._data[ATTR_HEARTBEAT_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled))
+        return self.dbus.Boards.Yellow.set_heartbeat_led(enabled)
 
     @property
     @dbus_property
@@ -37,11 +37,10 @@ class Yellow(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]
 
-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
         self._data[ATTR_POWER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled))
+        return self.dbus.Boards.Yellow.set_power_led(enabled)
 
     @property
     @dbus_property
@@ -49,17 +48,18 @@ class Yellow(BoardProxy):
         """Get disk LED enabled."""
         return self.properties[DBUS_ATTR_DISK_LED]
 
-    @disk_led.setter
-    def disk_led(self, enabled: bool) -> None:
+    def set_disk_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable disk LED."""
         self._data[ATTR_DISK_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled))
+        return self.dbus.Boards.Yellow.set_disk_led(enabled)
 
     async def connect(self, bus: MessageBus) -> None:
         """Connect to D-Bus."""
         await super().connect(bus)
 
         # Set LEDs based on settings on connect
-        self.disk_led = self._data[ATTR_DISK_LED]
-        self.heartbeat_led = self._data[ATTR_HEARTBEAT_LED]
-        self.power_led = self._data[ATTR_POWER_LED]
+        await asyncio.gather(
+            self.set_disk_led(self._data[ATTR_DISK_LED]),
+            self.set_heartbeat_led(self._data[ATTR_HEARTBEAT_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+        )

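Both board proxies replace fire-and-forget property setters (which hid failures inside asyncio.create_task) with set_* methods returning an Awaitable, so connect() can gather them and see errors. A small illustrative sketch of the resulting calling convention; the Board class below is invented for the example:

import asyncio
from collections.abc import Awaitable


class Board:
    """Sketch of a D-Bus-like proxy with awaitable setters."""

    def __init__(self) -> None:
        self._data: dict[str, bool] = {}

    async def _dbus_set(self, name: str, enabled: bool) -> None:
        await asyncio.sleep(0)  # stand-in for the real D-Bus call
        print(f"{name} -> {enabled}")

    def set_power_led(self, enabled: bool) -> Awaitable[None]:
        # Record the desired state, then return the pending D-Bus call so
        # the caller decides when (and whether) to await it.
        self._data["power_led"] = enabled
        return self._dbus_set("power_led", enabled)


async def main() -> None:
    board = Board()
    # Exceptions now surface here instead of dying in a detached task;
    # several updates can also run concurrently via asyncio.gather.
    await board.set_power_led(True)


asyncio.run(main())
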
@@ -12,6 +12,6 @@ class System(DBusInterface):
     object_path: str = DBUS_OBJECT_HAOS_SYSTEM
 
     @dbus_connected
-    async def schedule_wipe_device(self) -> None:
+    async def schedule_wipe_device(self) -> bool:
         """Schedule a factory reset on next system boot."""
-        await self.dbus.System.call_schedule_wipe_device()
+        return await self.dbus.System.call_schedule_wipe_device()

@@ -36,12 +36,14 @@ DBUS_IFACE_RAUC_INSTALLER = "de.pengutronix.rauc.Installer"
 DBUS_IFACE_RESOLVED_MANAGER = "org.freedesktop.resolve1.Manager"
 DBUS_IFACE_SETTINGS_CONNECTION = "org.freedesktop.NetworkManager.Settings.Connection"
 DBUS_IFACE_SYSTEMD_MANAGER = "org.freedesktop.systemd1.Manager"
+DBUS_IFACE_SYSTEMD_UNIT = "org.freedesktop.systemd1.Unit"
 DBUS_IFACE_TIMEDATE = "org.freedesktop.timedate1"
 DBUS_IFACE_UDISKS2_MANAGER = "org.freedesktop.UDisks2.Manager"
 
 DBUS_SIGNAL_NM_CONNECTION_ACTIVE_CHANGED = (
     "org.freedesktop.NetworkManager.Connection.Active.StateChanged"
 )
+DBUS_SIGNAL_PROPERTIES_CHANGED = "org.freedesktop.DBus.Properties.PropertiesChanged"
 DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED = "de.pengutronix.rauc.Installer.Completed"
 
 DBUS_OBJECT_BASE = "/"
@@ -59,11 +61,13 @@ DBUS_OBJECT_RESOLVED = "/org/freedesktop/resolve1"
 DBUS_OBJECT_SETTINGS = "/org/freedesktop/NetworkManager/Settings"
 DBUS_OBJECT_SYSTEMD = "/org/freedesktop/systemd1"
 DBUS_OBJECT_TIMEDATE = "/org/freedesktop/timedate1"
-DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager"
+DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2"
+DBUS_OBJECT_UDISKS2_MANAGER = "/org/freedesktop/UDisks2/Manager"
 
 DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint"
 DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection"
 DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections"
+DBUS_ATTR_ACTIVE_STATE = "ActiveState"
 DBUS_ATTR_ACTIVITY_LED = "ActivityLED"
 DBUS_ATTR_ADDRESS_DATA = "AddressData"
 DBUS_ATTR_BITRATE = "Bitrate"
@@ -177,6 +181,7 @@ DBUS_ATTR_UUID = "Uuid"
 DBUS_ATTR_VARIANT = "Variant"
 DBUS_ATTR_VENDOR = "Vendor"
 DBUS_ATTR_VERSION = "Version"
+DBUS_ATTR_VIRTUALIZATION = "Virtualization"
 DBUS_ATTR_WHAT = "What"
 DBUS_ATTR_WWN = "WWN"

@@ -3,7 +3,7 @@ import logging
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from .const import (
     DBUS_ATTR_CHASSIS,
     DBUS_ATTR_DEPLOYMENT,
@@ -39,7 +39,7 @@ class Hostname(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd-hostname")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No hostname support on the host. Hostname functions have been disabled."
             )

@@ -3,7 +3,7 @@ import logging
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from .const import DBUS_NAME_LOGIND, DBUS_OBJECT_LOGIND
 from .interface import DBusInterface
 from .utils import dbus_connected
@@ -28,8 +28,8 @@ class Logind(DBusInterface):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd-logind")
-        except DBusInterfaceError:
-            _LOGGER.info("No systemd-logind support on the host.")
+        except (DBusServiceUnkownError, DBusInterfaceError):
+            _LOGGER.warning("No systemd-logind support on the host.")
 
     @dbus_connected
     async def reboot(self) -> None:

@@ -17,7 +17,7 @@ from .rauc import Rauc
 from .resolved import Resolved
 from .systemd import Systemd
 from .timedate import TimeDate
-from .udisks2 import UDisks2
+from .udisks2 import UDisks2Manager
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -37,7 +37,7 @@ class DBusManager(CoreSysAttributes):
         self._agent: OSAgent = OSAgent()
         self._timedate: TimeDate = TimeDate()
         self._resolved: Resolved = Resolved()
-        self._udisks2: UDisks2 = UDisks2()
+        self._udisks2: UDisks2Manager = UDisks2Manager()
         self._bus: MessageBus | None = None
 
     @property
@@ -81,7 +81,7 @@ class DBusManager(CoreSysAttributes):
         return self._resolved
 
     @property
-    def udisks2(self) -> UDisks2:
+    def udisks2(self) -> UDisks2Manager:
         """Return the udisks2 interface."""
         return self._udisks2

@@ -9,6 +9,8 @@ from ...exceptions import (
     DBusError,
     DBusFatalError,
     DBusInterfaceError,
+    DBusNoReplyError,
+    DBusServiceUnkownError,
     HostNotSupportedError,
     NetworkInterfaceNotFound,
 )
@@ -143,7 +145,7 @@ class NetworkManager(DBusInterfaceProxy):
             await self.settings.connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to Network Manager")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No Network Manager support on the host. Local network functions have been disabled."
             )
@@ -210,8 +212,22 @@ class NetworkManager(DBusInterfaceProxy):
                     # try to query it. Ignore those cases.
                     _LOGGER.debug("Can't process %s: %s", device, err)
                     continue
+                except (
+                    DBusNoReplyError,
+                    DBusServiceUnkownError,
+                ) as err:
+                    # This typically means that NetworkManager disappeared. Give up immediately.
+                    _LOGGER.error(
+                        "NetworkManager not responding while processing %s: %s. Giving up.",
+                        device,
+                        err,
+                    )
+                    capture_exception(err)
+                    return
                 except Exception as err:  # pylint: disable=broad-except
-                    _LOGGER.exception("Error while processing %s: %s", device, err)
+                    _LOGGER.exception(
+                        "Unknown error while processing %s: %s", device, err
+                    )
                     capture_exception(err)
                     continue

@@ -12,7 +12,7 @@ from ...const import (
     ATTR_PRIORITY,
     ATTR_VPN,
 )
-from ...exceptions import DBusError, DBusInterfaceError
+from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..const import (
     DBUS_ATTR_CONFIGURATION,
     DBUS_ATTR_MODE,
@@ -67,7 +67,7 @@ class NetworkManagerDNS(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to DnsManager")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No DnsManager support on the host. Local DNS functions have been disabled."
             )

@@ -7,6 +7,8 @@ from uuid import uuid4
 
 from dbus_fast import Variant
 
+from ....host.const import InterfaceMethod, InterfaceType
+from .. import NetworkManager
 from . import (
     ATTR_ASSIGNED_MAC,
     CONF_ATTR_802_ETHERNET,
@@ -19,8 +21,6 @@ from . import (
     CONF_ATTR_PATH,
     CONF_ATTR_VLAN,
 )
-from .. import NetworkManager
-from ....host.const import InterfaceMethod, InterfaceType
 
 if TYPE_CHECKING:
     from ....host.configuration import Interface
@@ -37,8 +37,8 @@ def get_connection_from_interface(
     # Generate/Update ID/name
     if not name or not name.startswith("Supervisor"):
         name = f"Supervisor {interface.name}"
-    if interface.type == InterfaceType.VLAN:
-        name = f"{name}.{interface.vlan.id}"
+        if interface.type == InterfaceType.VLAN:
+            name = f"{name}.{interface.vlan.id}"
 
     if interface.type == InterfaceType.ETHERNET:
         iftype = "802-3-ethernet"
@@ -148,8 +148,8 @@ def get_connection_from_interface(
             wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY)
             wireless_security = {}
             if interface.wifi.auth == "wep":
-                wireless_security["auth-alg"] = Variant("s", "none")
-                wireless_security["key-mgmt"] = Variant("s", "open")
+                wireless_security["auth-alg"] = Variant("s", "open")
+                wireless_security["key-mgmt"] = Variant("s", "none")
             elif interface.wifi.auth == "wpa-psk":
                 wireless_security["auth-alg"] = Variant("s", "open")
                 wireless_security["key-mgmt"] = Variant("s", "wpa-psk")

@@ -4,7 +4,7 @@ from typing import Any
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ...exceptions import DBusError, DBusInterfaceError
+from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..const import DBUS_NAME_NM, DBUS_OBJECT_SETTINGS
 from ..interface import DBusInterface
 from ..network.setting import NetworkSetting
@@ -28,7 +28,7 @@ class NetworkManagerSettings(DBusInterface):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to Network Manager Settings")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No Network Manager Settings support on the host. Local network functions have been disabled."
             )

@@ -1,10 +1,12 @@
 """D-Bus interface for rauc."""
 
+from ctypes import c_uint32, c_uint64
 import logging
-from typing import Any
+from typing import Any, NotRequired, TypedDict
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..utils.dbus import DBusSignalWrapper
 from .const import (
     DBUS_ATTR_BOOT_SLOT,
@@ -23,6 +25,28 @@ from .utils import dbus_connected
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
+SlotStatusDataType = TypedDict(
+    "SlotStatusDataType",
+    {
+        "class": str,
+        "type": str,
+        "state": str,
+        "device": str,
+        "bundle.compatible": NotRequired[str],
+        "sha256": NotRequired[str],
+        "size": NotRequired[c_uint64],
+        "installed.count": NotRequired[c_uint32],
+        "bundle.version": NotRequired[str],
+        "installed.timestamp": NotRequired[str],
+        "status": NotRequired[str],
+        "activated.count": NotRequired[c_uint32],
+        "activated.timestamp": NotRequired[str],
+        "boot-status": NotRequired[str],
+        "bootname": NotRequired[str],
+        "parent": NotRequired[str],
+    },
+)
+
+
 class Rauc(DBusInterfaceProxy):
     """Handle D-Bus interface for rauc."""
@@ -49,7 +73,7 @@ class Rauc(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to rauc")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning("Host has no rauc support. OTA updates have been disabled.")
 
     @property
@@ -83,7 +107,7 @@ class Rauc(DBusInterfaceProxy):
         await self.dbus.Installer.call_install(str(raucb_file))
 
     @dbus_connected
-    async def get_slot_status(self) -> list[tuple[str, dict[str, Any]]]:
+    async def get_slot_status(self) -> list[tuple[str, SlotStatusDataType]]:
         """Get slot status."""
         return await self.dbus.Installer.call_get_slot_status()

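get_slot_status now advertises list[tuple[str, SlotStatusDataType]] instead of untyped dicts, letting type checkers validate access to RAUC slot keys. A trimmed sketch of consuming such a structure; the sample data is made up, and the functional TypedDict form is required because keys like "class" and "boot-status" are not valid Python identifiers:

from typing import NotRequired, TypedDict

# Cut-down version of the SlotStatusDataType above, for illustration only.
SlotStatus = TypedDict(
    "SlotStatus",
    {
        "class": str,
        "device": str,
        "boot-status": NotRequired[str],
    },
)


def good_slots(slots: list[tuple[str, SlotStatus]]) -> list[str]:
    """Return slot names whose boot-status is reported as good."""
    return [name for name, status in slots if status.get("boot-status") == "good"]


# Hypothetical data shaped like a RAUC slot status response.
print(good_slots([("kernel.0", {"class": "kernel", "device": "/dev/mmcblk0p1", "boot-status": "good"})]))
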
@@ -5,7 +5,7 @@ import logging
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from .const import (
     DBUS_ATTR_CACHE_STATISTICS,
     DBUS_ATTR_CURRENT_DNS_SERVER,
@@ -59,7 +59,7 @@ class Resolved(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd-resolved.")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "Host has no systemd-resolved support. DNS will not work correctly."
             )

@@ -10,18 +10,22 @@ from ..exceptions import (
     DBusError,
     DBusFatalError,
     DBusInterfaceError,
+    DBusServiceUnkownError,
     DBusSystemdNoSuchUnit,
 )
+from ..utils.dbus import DBusSignalWrapper
 from .const import (
     DBUS_ATTR_FINISH_TIMESTAMP,
     DBUS_ATTR_FIRMWARE_TIMESTAMP_MONOTONIC,
     DBUS_ATTR_KERNEL_TIMESTAMP_MONOTONIC,
     DBUS_ATTR_LOADER_TIMESTAMP_MONOTONIC,
     DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC,
+    DBUS_ATTR_VIRTUALIZATION,
     DBUS_ERR_SYSTEMD_NO_SUCH_UNIT,
     DBUS_IFACE_SYSTEMD_MANAGER,
     DBUS_NAME_SYSTEMD,
     DBUS_OBJECT_SYSTEMD,
+    DBUS_SIGNAL_PROPERTIES_CHANGED,
     StartUnitMode,
     StopUnitMode,
     UnitActiveState,
@@ -41,9 +45,7 @@ def systemd_errors(func):
             return await func(*args, **kwds)
         except DBusFatalError as err:
             if err.type == DBUS_ERR_SYSTEMD_NO_SUCH_UNIT:
-                # pylint: disable=raise-missing-from
-                raise DBusSystemdNoSuchUnit(str(err))
-                # pylint: enable=raise-missing-from
+                raise DBusSystemdNoSuchUnit(str(err)) from None
             raise err

     return wrapper
@@ -65,6 +67,11 @@ class SystemdUnit(DBusInterface):
         """Get active state of the unit."""
         return await self.dbus.Unit.get_active_state()

+    @dbus_connected
+    def properties_changed(self) -> DBusSignalWrapper:
+        """Return signal wrapper for properties changed."""
+        return self.dbus.signal(DBUS_SIGNAL_PROPERTIES_CHANGED)
+

 class Systemd(DBusInterfaceProxy):
     """Systemd function handler.
@@ -86,7 +93,7 @@ class Systemd(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No systemd support on the host. Host control has been disabled."
             )
@@ -108,6 +115,12 @@ class Systemd(DBusInterfaceProxy):
         """Return the boot timestamp."""
         return self.properties[DBUS_ATTR_FINISH_TIMESTAMP]

+    @property
+    @dbus_property
+    def virtualization(self) -> str:
+        """Return virtualization hypervisor being used."""
+        return self.properties[DBUS_ATTR_VIRTUALIZATION]
+
     @dbus_connected
     async def reboot(self) -> None:
         """Reboot host computer."""

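systemd's Manager object publishes a `Virtualization` property that is an empty string on bare metal and a hypervisor identifier such as `kvm` or `vmware` inside a VM; the new `virtualization` property surfaces it, and `properties_changed()` hands callers a `DBusSignalWrapper` for a unit's standard `PropertiesChanged` signal. A minimal sketch of consuming the property, with a stand-in object in place of a connected `Systemd` proxy:

class _FakeSystemd:
    # Stand-in for a connected Systemd proxy; "" would mean bare metal.
    virtualization = "kvm"


def describe_host(systemd) -> str:
    if virt := systemd.virtualization:
        return f"virtualized ({virt})"
    return "bare metal"


print(describe_host(_FakeSystemd()))  # virtualized (kvm)
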
@@ -4,7 +4,7 @@ import logging

 from dbus_fast.aio.message_bus import MessageBus

-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..utils.dt import utc_from_timestamp
 from .const import (
     DBUS_ATTR_NTP,
@@ -63,7 +63,7 @@ class TimeDate(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd-timedate")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No timedate support on the host. Time/Date functions have been disabled."
             )

@@ -6,16 +6,24 @@ from typing import Any
 from awesomeversion import AwesomeVersion
 from dbus_fast.aio import MessageBus

-from ...exceptions import DBusError, DBusInterfaceError, DBusObjectError
+from ...exceptions import (
+    DBusError,
+    DBusInterfaceError,
+    DBusObjectError,
+    DBusServiceUnkownError,
+)
 from ..const import (
     DBUS_ATTR_SUPPORTED_FILESYSTEMS,
     DBUS_ATTR_VERSION,
+    DBUS_IFACE_BLOCK,
+    DBUS_IFACE_DRIVE,
     DBUS_IFACE_UDISKS2_MANAGER,
     DBUS_NAME_UDISKS2,
     DBUS_OBJECT_BASE,
     DBUS_OBJECT_UDISKS2,
+    DBUS_OBJECT_UDISKS2_MANAGER,
 )
-from ..interface import DBusInterfaceProxy, dbus_property
+from ..interface import DBusInterface, DBusInterfaceProxy, dbus_property
 from ..utils import dbus_connected
 from .block import UDisks2Block
 from .const import UDISKS2_DEFAULT_OPTIONS
@@ -25,7 +33,15 @@ from .drive import UDisks2Drive
 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class UDisks2(DBusInterfaceProxy):
+class UDisks2(DBusInterface):
+    """Handle D-Bus interface for UDisks2 root object."""
+
+    name: str = DBUS_NAME_UDISKS2
+    bus_name: str = DBUS_NAME_UDISKS2
+    object_path: str = DBUS_OBJECT_UDISKS2
+
+
+class UDisks2Manager(DBusInterfaceProxy):
     """Handle D-Bus interface for UDisks2.

     http://storaged.org/doc/udisks2-api/latest/
@@ -33,22 +49,36 @@ class UDisks2(DBusInterfaceProxy):

     name: str = DBUS_NAME_UDISKS2
     bus_name: str = DBUS_NAME_UDISKS2
-    object_path: str = DBUS_OBJECT_UDISKS2
+    object_path: str = DBUS_OBJECT_UDISKS2_MANAGER
     properties_interface: str = DBUS_IFACE_UDISKS2_MANAGER

     _block_devices: dict[str, UDisks2Block] = {}
     _drives: dict[str, UDisks2Drive] = {}

+    def __init__(self):
+        """Initialize object."""
+        super().__init__()
+        self.udisks2_object_manager = UDisks2()
+
     async def connect(self, bus: MessageBus):
         """Connect to D-Bus."""
         try:
             await super().connect(bus)
+            await self.udisks2_object_manager.connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to udisks2")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No udisks2 support on the host. Host control has been disabled."
             )
+        else:
+            # Register for signals on devices added/removed
+            self.udisks2_object_manager.dbus.object_manager.on_interfaces_added(
+                self._interfaces_added
+            )
+            self.udisks2_object_manager.dbus.object_manager.on_interfaces_removed(
+                self._interfaces_removed
+            )

     @dbus_connected
     async def update(self, changed: dict[str, Any] | None = None) -> None:
@@ -156,11 +186,47 @@ class UDisks2(DBusInterfaceProxy):
             ]
         )

+    async def _interfaces_added(
+        self, object_path: str, properties: dict[str, dict[str, Any]]
+    ) -> None:
+        """Interfaces added to a UDisks2 object."""
+        if object_path in self._block_devices:
+            await self._block_devices[object_path].update()
+            return
+        if object_path in self._drives:
+            await self._drives[object_path].update()
+            return
+
+        if DBUS_IFACE_BLOCK in properties:
+            self._block_devices[object_path] = await UDisks2Block.new(
+                object_path, self.dbus.bus
+            )
+            return
+
+        if DBUS_IFACE_DRIVE in properties:
+            self._drives[object_path] = await UDisks2Drive.new(
+                object_path, self.dbus.bus
+            )
+
+    async def _interfaces_removed(
+        self, object_path: str, interfaces: list[str]
+    ) -> None:
+        """Interfaces removed from a UDisks2 object."""
+        if object_path in self._block_devices and DBUS_IFACE_BLOCK in interfaces:
+            self._block_devices[object_path].shutdown()
+            del self._block_devices[object_path]
+            return
+
+        if object_path in self._drives and DBUS_IFACE_DRIVE in interfaces:
+            self._drives[object_path].shutdown()
+            del self._drives[object_path]
+
     def shutdown(self) -> None:
         """Shutdown the object and disconnect from D-Bus.

         This method is irreversible.
         """
+        self.udisks2_object_manager.shutdown()
         for block_device in self.block_devices:
             block_device.shutdown()
         for drive in self.drives:

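The manager now tracks devices through the standard `org.freedesktop.DBus.ObjectManager` signals instead of relying solely on polling: `InterfacesAdded` delivers `(object_path, {interface: properties})` and `InterfacesRemoved` delivers `(object_path, [interfaces])`. A dependency-free sketch of the cache bookkeeping the two handlers perform; the device objects are stand-ins for the real `UDisks2Block`/`UDisks2Drive` proxies:

from typing import Any

DBUS_IFACE_BLOCK = "org.freedesktop.UDisks2.Block"
DBUS_IFACE_DRIVE = "org.freedesktop.UDisks2.Drive"

block_devices: dict[str, object] = {}
drives: dict[str, object] = {}


def interfaces_added(object_path: str, properties: dict[str, dict[str, Any]]) -> None:
    if object_path in block_devices or object_path in drives:
        return  # already tracked; the real handler refreshes it via update()
    if DBUS_IFACE_BLOCK in properties:
        block_devices[object_path] = object()  # real code: await UDisks2Block.new(...)
    elif DBUS_IFACE_DRIVE in properties:
        drives[object_path] = object()  # real code: await UDisks2Drive.new(...)


def interfaces_removed(object_path: str, interfaces: list[str]) -> None:
    # Only drop the cache entry when the matching interface goes away.
    if DBUS_IFACE_BLOCK in interfaces:
        block_devices.pop(object_path, None)
    if DBUS_IFACE_DRIVE in interfaces:
        drives.pop(object_path, None)


interfaces_added("/org/freedesktop/UDisks2/block_devices/sda", {DBUS_IFACE_BLOCK: {}})
interfaces_removed("/org/freedesktop/UDisks2/block_devices/sda", [DBUS_IFACE_BLOCK])
print(block_devices)  # {}
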
@@ -1,6 +1,6 @@
 """Interface to UDisks2 Drive over D-Bus."""

-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from dbus_fast.aio import MessageBus

@@ -95,7 +95,7 @@ class UDisks2Drive(DBusInterfaceProxy):
         """Return time drive first detected."""
         return datetime.fromtimestamp(
             self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6
-        ).astimezone(timezone.utc)
+        ).astimezone(UTC)

     @property
     @dbus_property

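`datetime.UTC`, added in Python 3.11, is the same object as `datetime.timezone.utc`, so this is a pure spelling change; note that UDisks2 reports `TimeDetected` in microseconds, hence the `* 10**-6`:

from datetime import UTC, datetime, timezone

assert UTC is timezone.utc  # alias, not a new timezone

time_detected_us = 1_700_000_000_000_000  # illustrative microsecond timestamp
print(datetime.fromtimestamp(time_detected_us * 10**-6).astimezone(UTC))
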
@@ -7,14 +7,12 @@ from typing import TYPE_CHECKING, Any
 from uuid import UUID, uuid4

 import attr
-import voluptuous as vol
-from voluptuous.humanize import humanize_error

 from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import DiscoveryError, HomeAssistantAPIError
+from ..exceptions import HomeAssistantAPIError
 from ..utils.common import FileConfiguration
-from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config
+from .validate import SCHEMA_DISCOVERY_CONFIG

 if TYPE_CHECKING:
     from ..addons.addon import Addon
@@ -75,12 +73,6 @@ class Discovery(CoreSysAttributes, FileConfiguration):

     def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message:
         """Send a discovery message to Home Assistant."""
-        try:
-            config = valid_discovery_config(service, config)
-        except vol.Invalid as err:
-            _LOGGER.error("Invalid discovery %s config", humanize_error(config, err))
-            raise DiscoveryError() from err
-
         # Create message
         message = Message(addon.slug, service, config)

@@ -1 +0,0 @@
-"""Discovery service modules."""
@@ -1,9 +0,0 @@
-"""Discovery service for AdGuard."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,9 +0,0 @@
-"""Discovery service for Almond."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,14 +0,0 @@
-"""Discovery service for MQTT."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_API_KEY, ATTR_HOST, ATTR_PORT, ATTR_SERIAL
-
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Required(ATTR_SERIAL): str,
-        vol.Required(ATTR_API_KEY): str,
-    }
-)
@@ -1,9 +0,0 @@
-"""Discovery service for the ESPHome Dashboard."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,16 +0,0 @@
-"""Discovery service for HomeMatic."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {
-        str: vol.Schema(
-            {
-                vol.Required(ATTR_HOST): str,
-                vol.Required(ATTR_PORT): network_port,
-            }
-        )
-    }
-)
@@ -1,13 +0,0 @@
-"""Discovery service for Matter Server."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-    }
-)
@@ -1,6 +0,0 @@
-"""Discovery service for motionEye."""
-import voluptuous as vol
-
-from ..const import ATTR_URL
-
-SCHEMA = vol.Schema({vol.Required(ATTR_URL): str})
@@ -1,26 +0,0 @@
-"""Discovery service for MQTT."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import (
-    ATTR_HOST,
-    ATTR_PASSWORD,
-    ATTR_PORT,
-    ATTR_PROTOCOL,
-    ATTR_SSL,
-    ATTR_USERNAME,
-)
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Optional(ATTR_USERNAME): str,
-        vol.Optional(ATTR_PASSWORD): str,
-        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
-        vol.Optional(ATTR_PROTOCOL, default="3.1.1"): vol.All(
-            str, vol.In(["3.1", "3.1.1"])
-        ),
-    }
-)
@@ -1,13 +0,0 @@
-"""Discovery service for OpenThread Border Router."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-    }
-)
@@ -1,15 +0,0 @@
-"""Discovery service for OpenZwave MQTT."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PASSWORD, ATTR_PORT, ATTR_USERNAME
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Required(ATTR_USERNAME): str,
-        vol.Required(ATTR_PASSWORD): str,
-    }
-)
@@ -1,9 +0,0 @@
-"""Discovery service for RTSPtoWebRTC."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,9 +0,0 @@
-"""Discovery service for UniFi."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,14 +0,0 @@
-"""Discovery service for VLC Telnet."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PASSWORD, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Required(ATTR_PASSWORD): str,
-    }
-)
@@ -1,25 +0,0 @@
-"""Discovery service for the Wyoming Protocol integration."""
-from typing import Any, cast
-from urllib.parse import urlparse
-
-import voluptuous as vol
-
-from ..const import ATTR_URI
-
-
-def validate_uri(value: Any) -> str:
-    """Validate an Wyoming URI.
-
-    Currently accepts TCP URIs, can extended
-    to accept UNIX sockets in the future.
-    """
-    uri_value = str(value)
-
-    if urlparse(uri_value).scheme == "tcp":
-        # pylint: disable-next=no-value-for-parameter
-        return cast(str, vol.Schema(vol.Url())(uri_value))
-
-    raise vol.Invalid("invalid Wyoming Protocol URI")
-
-
-SCHEMA = vol.Schema({vol.Required(ATTR_URI): validate_uri})
@@ -1,13 +0,0 @@
-"""Discovery service for Zwave JS."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-    }
-)
@@ -1,6 +1,4 @@
 """Validate services schema."""
-from importlib import import_module
-from pathlib import Path

 import voluptuous as vol

@@ -8,25 +6,6 @@ from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_
 from ..utils.validate import schema_or
 from ..validate import uuid_match


-def valid_discovery_service(service):
-    """Validate service name."""
-    service_file = Path(__file__).parent.joinpath(f"services/{service}.py")
-    if not service_file.exists():
-        raise vol.Invalid(f"Service {service} not found") from None
-    return service
-
-
-def valid_discovery_config(service, config):
-    """Validate service name."""
-    try:
-        service_mod = import_module(f".services.{service}", "supervisor.discovery")
-    except ImportError:
-        raise vol.Invalid(f"Service {service} not found") from None
-
-    return service_mod.SCHEMA(config)
-
-
 SCHEMA_DISCOVERY = vol.Schema(
     [
         vol.Schema(

@@ -15,18 +15,10 @@ from docker.types import Mount
 import requests

 from ..addons.build import AddonBuild
+from ..addons.const import MappingType
 from ..bus import EventListener
 from ..const import (
     DOCKER_CPU_RUNTIME_ALLOCATION,
-    MAP_ADDON_CONFIG,
-    MAP_ADDONS,
-    MAP_ALL_ADDON_CONFIGS,
-    MAP_BACKUP,
-    MAP_CONFIG,
-    MAP_HOMEASSISTANT_CONFIG,
-    MAP_MEDIA,
-    MAP_SHARE,
-    MAP_SSL,
     SECURITY_DISABLE,
     SECURITY_PROFILE,
     SYSTEMD_JOURNAL_PERSISTENT,
@@ -241,10 +233,10 @@ class DockerAddon(DockerInterface):
         tmpfs = {}

         if self.addon.with_tmpfs:
-            tmpfs["/tmp"] = ""
+            tmpfs["/tmp"] = ""  # noqa: S108

         if not self.addon.host_ipc:
-            tmpfs["/dev/shm"] = ""
+            tmpfs["/dev/shm"] = ""  # noqa: S108

         # Return None if no tmpfs is present
         if tmpfs:
@@ -332,24 +324,28 @@ class DockerAddon(DockerInterface):
         """Return mounts for container."""
         addon_mapping = self.addon.map_volumes

+        target_data_path = ""
+        if MappingType.DATA in addon_mapping:
+            target_data_path = addon_mapping[MappingType.DATA].path
+
         mounts = [
             MOUNT_DEV,
             Mount(
                 type=MountType.BIND,
                 source=self.addon.path_extern_data.as_posix(),
-                target="/data",
+                target=target_data_path or "/data",
                 read_only=False,
             ),
         ]

         # setup config mappings
-        if MAP_CONFIG in addon_mapping:
+        if MappingType.CONFIG in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_homeassistant.as_posix(),
-                    target="/config",
-                    read_only=addon_mapping[MAP_CONFIG],
+                    target=addon_mapping[MappingType.CONFIG].path or "/config",
+                    read_only=addon_mapping[MappingType.CONFIG].read_only,
                 )
             )

@@ -360,80 +356,85 @@ class DockerAddon(DockerInterface):
                     Mount(
                         type=MountType.BIND,
                         source=self.addon.path_extern_config.as_posix(),
-                        target="/config",
-                        read_only=addon_mapping[MAP_ADDON_CONFIG],
+                        target=addon_mapping[MappingType.ADDON_CONFIG].path
+                        or "/config",
+                        read_only=addon_mapping[MappingType.ADDON_CONFIG].read_only,
                     )
                 )

             # Map Home Assistant config in new way
-            if MAP_HOMEASSISTANT_CONFIG in addon_mapping:
+            if MappingType.HOMEASSISTANT_CONFIG in addon_mapping:
                 mounts.append(
                     Mount(
                         type=MountType.BIND,
                         source=self.sys_config.path_extern_homeassistant.as_posix(),
-                        target="/homeassistant",
-                        read_only=addon_mapping[MAP_HOMEASSISTANT_CONFIG],
+                        target=addon_mapping[MappingType.HOMEASSISTANT_CONFIG].path
+                        or "/homeassistant",
+                        read_only=addon_mapping[
+                            MappingType.HOMEASSISTANT_CONFIG
+                        ].read_only,
                     )
                 )

-        if MAP_ALL_ADDON_CONFIGS in addon_mapping:
+        if MappingType.ALL_ADDON_CONFIGS in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_addon_configs.as_posix(),
-                    target="/addon_configs",
-                    read_only=addon_mapping[MAP_ALL_ADDON_CONFIGS],
+                    target=addon_mapping[MappingType.ALL_ADDON_CONFIGS].path
+                    or "/addon_configs",
+                    read_only=addon_mapping[MappingType.ALL_ADDON_CONFIGS].read_only,
                 )
             )

-        if MAP_SSL in addon_mapping:
+        if MappingType.SSL in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_ssl.as_posix(),
-                    target="/ssl",
-                    read_only=addon_mapping[MAP_SSL],
+                    target=addon_mapping[MappingType.SSL].path or "/ssl",
+                    read_only=addon_mapping[MappingType.SSL].read_only,
                 )
             )

-        if MAP_ADDONS in addon_mapping:
+        if MappingType.ADDONS in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_addons_local.as_posix(),
-                    target="/addons",
-                    read_only=addon_mapping[MAP_ADDONS],
+                    target=addon_mapping[MappingType.ADDONS].path or "/addons",
+                    read_only=addon_mapping[MappingType.ADDONS].read_only,
                 )
             )

-        if MAP_BACKUP in addon_mapping:
+        if MappingType.BACKUP in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_backup.as_posix(),
-                    target="/backup",
-                    read_only=addon_mapping[MAP_BACKUP],
+                    target=addon_mapping[MappingType.BACKUP].path or "/backup",
+                    read_only=addon_mapping[MappingType.BACKUP].read_only,
                 )
             )

-        if MAP_SHARE in addon_mapping:
+        if MappingType.SHARE in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_share.as_posix(),
-                    target="/share",
-                    read_only=addon_mapping[MAP_SHARE],
+                    target=addon_mapping[MappingType.SHARE].path or "/share",
+                    read_only=addon_mapping[MappingType.SHARE].read_only,
                     propagation=PropagationMode.RSLAVE,
                 )
             )

-        if MAP_MEDIA in addon_mapping:
+        if MappingType.MEDIA in addon_mapping:
             mounts.append(
                 Mount(
                     type=MountType.BIND,
                     source=self.sys_config.path_extern_media.as_posix(),
-                    target="/media",
-                    read_only=addon_mapping[MAP_MEDIA],
+                    target=addon_mapping[MappingType.MEDIA].path or "/media",
+                    read_only=addon_mapping[MappingType.MEDIA].read_only,
                     propagation=PropagationMode.RSLAVE,
                 )
             )
@@ -602,7 +603,11 @@ class DockerAddon(DockerInterface):
         on_condition=DockerJobError,
     )
     async def update(
-        self, version: AwesomeVersion, image: str | None = None, latest: bool = False
+        self,
+        version: AwesomeVersion,
+        image: str | None = None,
+        latest: bool = False,
+        arch: CpuArch | None = None,
     ) -> None:
         """Update a docker image."""
         image = image or self.image
@@ -613,7 +618,11 @@ class DockerAddon(DockerInterface):

         # Update docker image
         await self.install(
-            version, image=image, latest=latest, need_build=self.addon.latest_need_build
+            version,
+            image=image,
+            latest=latest,
+            arch=arch,
+            need_build=self.addon.latest_need_build,
         )

     @Job(
@@ -632,11 +641,11 @@ class DockerAddon(DockerInterface):
     ) -> None:
         """Pull Docker image or build it."""
         if need_build is None and self.addon.need_build or need_build:
-            await self._build(version)
+            await self._build(version, image)
         else:
             await super().install(version, image, latest, arch)

-    async def _build(self, version: AwesomeVersion) -> None:
+    async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
         """Build a Docker container."""
         build_env = AddonBuild(self.coresys, self.addon)
         if not build_env.is_valid:
@@ -648,7 +657,7 @@ class DockerAddon(DockerInterface):
             image, log = await self.sys_run_in_executor(
                 self.sys_docker.images.build,
                 use_config_proxy=False,
-                **build_env.get_docker_args(version),
+                **build_env.get_docker_args(version, image),
             )

             _LOGGER.debug("Build %s:%s done: %s", self.image, version, log)

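Volume mapping is now keyed by a `MappingType` enum whose entries carry an optional custom container path plus a read-only flag, replacing the old `MAP_*` string constants that mapped to a bare boolean; every target falls back to its legacy default when no custom path is configured. A dependency-free sketch of that fallback rule (the `FolderMapping` value class and its fields are assumptions mirroring `supervisor.addons.const`):

from dataclasses import dataclass
from enum import StrEnum


class MappingType(StrEnum):
    DATA = "data"
    CONFIG = "config"
    SSL = "ssl"
    SHARE = "share"


@dataclass
class FolderMapping:
    path: str | None  # custom target inside the container, if any
    read_only: bool


DEFAULT_TARGET = {
    MappingType.DATA: "/data",
    MappingType.CONFIG: "/config",
    MappingType.SSL: "/ssl",
    MappingType.SHARE: "/share",
}


def mount_target(mapping_type: MappingType, mapping: FolderMapping) -> str:
    # Same fallback rule as the hunks above: custom path wins, else legacy default.
    return mapping.path or DEFAULT_TARGET[mapping_type]


print(mount_target(MappingType.SSL, FolderMapping(None, True)))      # /ssl
print(mount_target(MappingType.SSL, FolderMapping("/certs", True)))  # /certs
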
@@ -74,6 +74,7 @@ MOUNT_DBUS = Mount(
     type=MountType.BIND, source="/run/dbus", target="/run/dbus", read_only=True
 )
 MOUNT_DEV = Mount(type=MountType.BIND, source="/dev", target="/dev", read_only=True)
+MOUNT_DEV.setdefault("BindOptions", {})["ReadOnlyNonRecursive"] = True
 MOUNT_DOCKER = Mount(
     type=MountType.BIND,
     source="/run/docker.sock",

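Docker Engine API v1.44 makes read-only bind mounts recursively read-only on supported kernels, which would break writable submounts under `/dev`; `BindOptions.ReadOnlyNonRecursive` opts out. docker-py's `Mount` has no keyword argument for it, but since `Mount` subclasses `dict`, the raw API field can be injected directly, which is what the added line does:

from docker.types import Mount

mount = Mount(type="bind", source="/dev", target="/dev", read_only=True)
assert isinstance(mount, dict)  # Mount is a dict subclass in docker-py
mount.setdefault("BindOptions", {})["ReadOnlyNonRecursive"] = True
print(mount["BindOptions"])  # {'ReadOnlyNonRecursive': True}
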
@@ -2,6 +2,7 @@
 from collections.abc import Awaitable
 from ipaddress import IPv4Address
 import logging
+import re

 from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
 from docker.types import Mount
@@ -28,6 +29,7 @@ from .interface import CommandReturn, DockerInterface
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 _VERIFY_TRUST: AwesomeVersion = AwesomeVersion("2021.5.0")
 _HASS_DOCKER_NAME: str = "homeassistant"
+ENV_S6_GRACETIME = re.compile(r"^S6_SERVICES_GRACETIME=([0-9]+)$")


 class DockerHomeAssistant(DockerInterface):
@@ -53,9 +55,15 @@ class DockerHomeAssistant(DockerInterface):
     @property
     def timeout(self) -> int:
         """Return timeout for Docker actions."""
-        # Synchronized homeassistant's S6_SERVICES_GRACETIME
-        # to avoid killing Home Assistant Core
-        return 220 + 20
+        # Use S6_SERVICES_GRACETIME to avoid killing Home Assistant Core, see
+        # https://github.com/home-assistant/core/tree/dev/Dockerfile
+        if self.meta_config and "Env" in self.meta_config:
+            for env in self.meta_config["Env"]:
+                if match := ENV_S6_GRACETIME.match(env):
+                    return 20 + int(int(match.group(1)) / 1000)
+
+        # Fallback - as of 2024.3, S6_SERVICES_GRACETIME was set to 240000
+        return 260

     @property
     def ip_address(self) -> IPv4Address:
@@ -174,7 +182,7 @@ class DockerHomeAssistant(DockerInterface):
                 ENV_TOKEN: self.sys_homeassistant.supervisor_token,
                 ENV_TOKEN_OLD: self.sys_homeassistant.supervisor_token,
             },
-            tmpfs={"/tmp": ""},  # noqa: S108
+            tmpfs={"/tmp": ""},  # noqa: S108
             oom_score_adj=-300,
         )
         _LOGGER.info(
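Worked example of the new timeout computation: Core's image sets `S6_SERVICES_GRACETIME` in milliseconds and the timeout becomes that value in seconds plus a 20 s buffer, so 240000 ms yields 20 + 240 = 260 s, which is exactly the hard-coded fallback. A self-contained sketch of the parse:

import re

ENV_S6_GRACETIME = re.compile(r"^S6_SERVICES_GRACETIME=([0-9]+)$")


def timeout_from_env(env: list[str]) -> int:
    """Derive the stop timeout from a container's environment list."""
    for entry in env:
        if match := ENV_S6_GRACETIME.match(entry):
            return 20 + int(int(match.group(1)) / 1000)
    return 260  # fallback: equivalent to a 240000 ms gracetime


print(timeout_from_env(["PATH=/usr/bin", "S6_SERVICES_GRACETIME=240000"]))  # 260
print(timeout_from_env(["PATH=/usr/bin"]))                                  # 260
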
@@ -14,6 +14,7 @@ from awesomeversion import AwesomeVersion
 from awesomeversion.strategy import AwesomeVersionStrategy
 import docker
 from docker.models.containers import Container
+from docker.models.images import Image
 import requests

 from ..const import (
@@ -438,6 +439,44 @@ class DockerInterface(JobGroup):
         )
         self._meta = None

+    @Job(
+        name="docker_interface_check_image",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=DockerJobError,
+    )
+    async def check_image(
+        self,
+        version: AwesomeVersion,
+        expected_image: str,
+        expected_arch: CpuArch | None = None,
+    ) -> None:
+        """Check we have expected image with correct arch."""
+        expected_arch = expected_arch or self.sys_arch.supervisor
+        image_name = f"{expected_image}:{version!s}"
+        if self.image == expected_image:
+            try:
+                image: Image = await self.sys_run_in_executor(
+                    self.sys_docker.images.get, image_name
+                )
+            except (docker.errors.DockerException, requests.RequestException) as err:
+                raise DockerError(
+                    f"Could not get {image_name} for check due to: {err!s}",
+                    _LOGGER.error,
+                ) from err
+
+            image_arch = f"{image.attrs['Os']}/{image.attrs['Architecture']}"
+            if "Variant" in image.attrs:
+                image_arch = f"{image_arch}/{image.attrs['Variant']}"
+
+            # If we have an image and its the right arch, all set
+            if MAP_ARCH[expected_arch] == image_arch:
+                return
+
+        # We're missing the image we need. Stop and clean up what we have then pull the right one
+        with suppress(DockerError):
+            await self.remove()
+        await self.install(version, expected_image, arch=expected_arch)
+
     @Job(
         name="docker_interface_update",
         limit=JobExecutionLimit.GROUP_ONCE,

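`check_image` compares the local image's platform against the expected CPU architecture before trusting it, assembling the `os/arch[/variant]` string Docker uses for platforms (for example `linux/arm/v7`). The assembly logic, shown against a hand-written dict in the shape docker-py exposes via `Image.attrs`:

def image_platform(attrs: dict) -> str:
    platform = f"{attrs['Os']}/{attrs['Architecture']}"
    if "Variant" in attrs:
        platform = f"{platform}/{attrs['Variant']}"
    return platform


print(image_platform({"Os": "linux", "Architecture": "amd64"}))                 # linux/amd64
print(image_platform({"Os": "linux", "Architecture": "arm", "Variant": "v7"}))  # linux/arm/v7
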
@@ -177,6 +177,11 @@ class DockerAPI:
         if dns:
             kwargs["dns"] = [str(self.network.dns)]
             kwargs["dns_search"] = [DNS_SUFFIX]
+            # CoreDNS forward plug-in fails in ~6s, then fallback triggers.
+            # However, the default timeout of glibc and musl is 5s. Increase
+            # default timeout to make sure CoreDNS fallback is working
+            # on first query.
+            kwargs["dns_opt"] = ["timeout:10"]
             if hostname:
                 kwargs["domainname"] = DNS_SUFFIX

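`dns_opt` entries end up as `options` lines in the container's `/etc/resolv.conf`; `timeout:10` raises the per-query resolver timeout above CoreDNS's roughly 6 s failover window, so the fallback upstream can answer the first query instead of the client giving up at glibc/musl's 5 s default. A sketch using docker-py against a running daemon; the image tag and resolver address are illustrative:

import docker

client = docker.from_env()
output = client.containers.run(
    "alpine:3.19",
    "cat /etc/resolv.conf",
    dns=["172.30.32.3"],     # illustrative resolver address
    dns_opt=["timeout:10"],  # rendered as "options timeout:10"
    remove=True,
)
print(output.decode())
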
@@ -1,4 +1,6 @@
 """Supervisor docker monitor based on events."""
+
+from contextlib import suppress
 from dataclasses import dataclass
 import logging
 from threading import Thread
@@ -47,10 +49,8 @@ class DockerMonitor(CoreSysAttributes, Thread):
     async def unload(self):
         """Stop docker events monitor."""
         self._events.close()
-        try:
-            self.join(timeout=5)
-        except RuntimeError:
-            pass
+        with suppress(RuntimeError):
+            self.join(timeout=5)

         _LOGGER.info("Stopped docker events monitor")

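`contextlib.suppress` is the idiomatic equivalent of the removed try/except/pass; `Thread.join()` raises `RuntimeError` if the monitor thread was never started, and that is the only case being silenced:

from contextlib import suppress
from threading import Thread

worker = Thread(target=lambda: None)
# join() before start() raises RuntimeError("cannot join thread before it is started")
with suppress(RuntimeError):
    worker.join(timeout=5)
print("shutdown path completed")
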
Some files were not shown because too many files have changed in this diff.