mirror of
				https://github.com/home-assistant/supervisor.git
				synced 2025-10-30 22:19:43 +00:00 
			
		
		
		
	Compare commits
	
		
			175 Commits
		
	
	
		
			2023.09.2
			...
			faster_bac
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | af3256e41e | ||
|   | a163121ad4 | ||
|   | eb85be2770 | ||
|   | 2da27937a5 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 2a29b801a4 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 57e65714b0 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 0ae40cb51c | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | ddd195dfc6 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 54b9f23ec5 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 242dd3e626 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 1b8acb5b60 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a7ab96ab12 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 06ab11cf87 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 1410a1b06e | ||
|   | 5baf19f7a3 | ||
|   | 6c66a7ba17 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 37b6e09475 | ||
|   | e08c8ca26d | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 2c09e7929f | ||
|   | 3e760f0d85 | ||
|   | 3cc6bd19ad | ||
|   | b7ddfba71d | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 32f21d208f | ||
|   | ed7edd9fe0 | ||
|   | fd3c995c7c | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | c0d1a2d53b | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 76bc3015a7 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | ad2896243b | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | d0dcded42d | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a0dfa01287 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 4ec5c90180 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a0c813bfc1 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 5f7b3a7087 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 6426f02a2c | ||
|   | 7fef92c480 | ||
|   | c64744dedf | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 72a2088931 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | db54556b0f | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a2653d8462 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | ef778238f6 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 4cc0ddc35d | ||
|   | a0429179a0 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 5cfb45c668 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a53b7041f5 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | f534fae293 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | f7cbd968d2 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 844d76290c | ||
|   | 8c8122eee0 | ||
|   | d63f0d5e0b | ||
|   | 96f4ba5d25 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 72e64676da | ||
|   | 883e54f989 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | c2d4be3304 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | de737ddb91 | ||
|   | 11ec6dd9ac | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | df7541e397 | ||
|   | 95ac53d780 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | e8c4b32a65 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | eca535c978 | ||
|   | 9088810b49 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 172a7053ed | ||
|   | 3d5bd2adef | ||
|   | cb03d039f4 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | bb31b1bc6e | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 727532858e | ||
|   | c0868d9dac | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | ce26e1dac6 | ||
|   | c74f87ca12 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 043111b91c | ||
|   | 5c579e557c | ||
|   | f8f51740c1 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 176b63df52 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | e1979357a5 | ||
|   | 030527a4f2 | ||
|   | cca74da1f3 | ||
|   | 928aff342f | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 60a97235df | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | c77779cf9d | ||
|   | 9351796ba8 | ||
|   | bef0f023d4 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 3116f183f5 | ||
|   | 16b71a22d1 | ||
|   | 5f4581042c | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 6976a4cf2e | ||
|   | 68d86b3b7b | ||
|   | d7d34d36c8 | ||
|   | 68da328cc5 | ||
|   | 78870186d7 | ||
|   | d634273b48 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 2d970eee02 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 1f0ea3c6f7 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | d736913f7f | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 3e95a9d282 | ||
|   | 7cd7259992 | ||
|   | 87385cf28e | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 3a00c94325 | ||
|   | 38d5d2307f | ||
|   | a0c12e7228 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | b6625ad909 | ||
|   | 6f01341055 | ||
|   | 6762a4153a | ||
|   | 31200df89f | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 18e422ca77 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 1b362716e3 | ||
|   | 1e49129197 | ||
|   | a8f818fca5 | ||
|   | 0f600da096 | ||
|   | b04efe4eac | ||
|   | 7361d39231 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 059c0df16c | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 6f6b849335 | ||
|   | a390500309 | ||
|   | 7c576da32c | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 6d021c1659 | ||
|   | 37c1c89d44 | ||
|   | 010043f116 | ||
|   | b1010c3c61 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 7f0204bfc3 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a508cc5efd | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 65c90696d5 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | b9f47898d6 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 26f554e46a | ||
|   | b57889c84f | ||
|   | 77fd1b4017 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | ab6745bc99 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a5ea3cae72 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 8bcd1b4efd | ||
|   | a24657e565 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | b7721420fa | ||
|   | 6c564fe4fd | ||
|   | 012bfd7e6c | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a70f81aa01 | ||
|   | 1376a38de5 | ||
|   | 1827ecda65 | ||
|   | 994c981228 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 5bbfbf44ae | ||
|   | ace58ba735 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | f9840306a0 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 322b3bbb4e | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 501318f468 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 0234f38b23 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 8743e0072f | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | a79e06afa7 | ||
|   | 682b8e0535 | ||
|   | d70aa5f9a9 | ||
|   | 1c815dcad1 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | afa467a32b | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 274218d48e | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 7e73df26ab | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | ef8fc80c95 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 05c39144e3 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | f5cd35af47 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | c69ecdafd0 | ||
|   | fa90c247ec | ||
|   | 0cd7bd47bb | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 36d48d19fc | ||
|   | 9322b68d47 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | e11ff64b15 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 3776dabfcf | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | d4e5831f0f | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 7b3b478e88 | ||
|   | f5afe13e91 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 49ce468d83 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | b26551c812 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 394ba580d2 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 2f7a54f5fd | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 360e085926 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 042921925d | ||
|   | dcf024387b | ||
|   | e1232bc9e7 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | d96598b5dd | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 2605f85668 | ||
|   | 2c8e6ca0cd | ||
|   | 0225f574be | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 34090bf2eb | 
| @@ -7,13 +7,15 @@ | |||||||
|   "appPort": ["9123:8123", "7357:4357"], |   "appPort": ["9123:8123", "7357:4357"], | ||||||
|   "postCreateCommand": "bash devcontainer_bootstrap", |   "postCreateCommand": "bash devcontainer_bootstrap", | ||||||
|   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"], |   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"], | ||||||
|  |   "customizations": { | ||||||
|  |     "vscode": { | ||||||
|       "extensions": [ |       "extensions": [ | ||||||
|         "ms-python.python", |         "ms-python.python", | ||||||
|  |         "ms-python.pylint", | ||||||
|         "ms-python.vscode-pylance", |         "ms-python.vscode-pylance", | ||||||
|         "visualstudioexptteam.vscodeintellicode", |         "visualstudioexptteam.vscodeintellicode", | ||||||
|         "esbenp.prettier-vscode" |         "esbenp.prettier-vscode" | ||||||
|       ], |       ], | ||||||
|   "mounts": ["type=volume,target=/var/lib/docker"], |  | ||||||
|       "settings": { |       "settings": { | ||||||
|         "terminal.integrated.profiles.linux": { |         "terminal.integrated.profiles.linux": { | ||||||
|           "zsh": { |           "zsh": { | ||||||
| @@ -26,15 +28,11 @@ | |||||||
|         "editor.formatOnType": true, |         "editor.formatOnType": true, | ||||||
|         "files.trimTrailingWhitespace": true, |         "files.trimTrailingWhitespace": true, | ||||||
|         "python.pythonPath": "/usr/local/bin/python3", |         "python.pythonPath": "/usr/local/bin/python3", | ||||||
|     "python.linting.pylintEnabled": true, |  | ||||||
|     "python.linting.enabled": true, |  | ||||||
|         "python.formatting.provider": "black", |         "python.formatting.provider": "black", | ||||||
|     "python.formatting.blackArgs": ["--target-version", "py310"], |         "python.formatting.blackArgs": ["--target-version", "py312"], | ||||||
|     "python.formatting.blackPath": "/usr/local/bin/black", |         "python.formatting.blackPath": "/usr/local/bin/black" | ||||||
|     "python.linting.banditPath": "/usr/local/bin/bandit", |  | ||||||
|     "python.linting.flake8Path": "/usr/local/bin/flake8", |  | ||||||
|     "python.linting.mypyPath": "/usr/local/bin/mypy", |  | ||||||
|     "python.linting.pylintPath": "/usr/local/bin/pylint", |  | ||||||
|     "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle" |  | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|  |   }, | ||||||
|  |   "mounts": ["type=volume,target=/var/lib/docker"] | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										36
									
								
								.github/workflows/builder.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										36
									
								
								.github/workflows/builder.yml
									
									
									
									
										vendored
									
									
								
							| @@ -33,7 +33,7 @@ on: | |||||||
|       - setup.py |       - setup.py | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   DEFAULT_PYTHON: "3.11" |   DEFAULT_PYTHON: "3.12" | ||||||
|   BUILD_NAME: supervisor |   BUILD_NAME: supervisor | ||||||
|   BUILD_TYPE: supervisor |   BUILD_TYPE: supervisor | ||||||
|  |  | ||||||
| @@ -53,7 +53,7 @@ jobs: | |||||||
|       requirements: ${{ steps.requirements.outputs.changed }} |       requirements: ${{ steps.requirements.outputs.changed }} | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout the repository |       - name: Checkout the repository | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|         with: |         with: | ||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|  |  | ||||||
| @@ -70,13 +70,13 @@ jobs: | |||||||
|       - name: Get changed files |       - name: Get changed files | ||||||
|         id: changed_files |         id: changed_files | ||||||
|         if: steps.version.outputs.publish == 'false' |         if: steps.version.outputs.publish == 'false' | ||||||
|         uses: jitterbit/get-changed-files@v1 |         uses: masesgroup/retrieve-changed-files@v3.0.0 | ||||||
|  |  | ||||||
|       - name: Check if requirements files changed |       - name: Check if requirements files changed | ||||||
|         id: requirements |         id: requirements | ||||||
|         run: | |         run: | | ||||||
|           if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then |           if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then | ||||||
|             echo "::set-output name=changed::true" |             echo "changed=true" >> "$GITHUB_OUTPUT" | ||||||
|           fi |           fi | ||||||
|  |  | ||||||
|   build: |   build: | ||||||
| @@ -92,7 +92,7 @@ jobs: | |||||||
|         arch: ${{ fromJson(needs.init.outputs.architectures) }} |         arch: ${{ fromJson(needs.init.outputs.architectures) }} | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout the repository |       - name: Checkout the repository | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|         with: |         with: | ||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|  |  | ||||||
| @@ -106,13 +106,13 @@ jobs: | |||||||
|  |  | ||||||
|       - name: Build wheels |       - name: Build wheels | ||||||
|         if: needs.init.outputs.requirements == 'true' |         if: needs.init.outputs.requirements == 'true' | ||||||
|         uses: home-assistant/wheels@2023.04.0 |         uses: home-assistant/wheels@2024.01.0 | ||||||
|         with: |         with: | ||||||
|           abi: cp311 |           abi: cp312 | ||||||
|           tag: musllinux_1_2 |           tag: musllinux_1_2 | ||||||
|           arch: ${{ matrix.arch }} |           arch: ${{ matrix.arch }} | ||||||
|           wheels-key: ${{ secrets.WHEELS_KEY }} |           wheels-key: ${{ secrets.WHEELS_KEY }} | ||||||
|           apk: "libffi-dev;openssl-dev" |           apk: "libffi-dev;openssl-dev;yaml-dev" | ||||||
|           skip-binary: aiohttp |           skip-binary: aiohttp | ||||||
|           env-file: true |           env-file: true | ||||||
|           requirements: "requirements.txt" |           requirements: "requirements.txt" | ||||||
| @@ -125,20 +125,20 @@ jobs: | |||||||
|  |  | ||||||
|       - name: Set up Python ${{ env.DEFAULT_PYTHON }} |       - name: Set up Python ${{ env.DEFAULT_PYTHON }} | ||||||
|         if: needs.init.outputs.publish == 'true' |         if: needs.init.outputs.publish == 'true' | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ env.DEFAULT_PYTHON }} |           python-version: ${{ env.DEFAULT_PYTHON }} | ||||||
|  |  | ||||||
|       - name: Install Cosign |       - name: Install Cosign | ||||||
|         if: needs.init.outputs.publish == 'true' |         if: needs.init.outputs.publish == 'true' | ||||||
|         uses: sigstore/cosign-installer@v3.1.2 |         uses: sigstore/cosign-installer@v3.3.0 | ||||||
|         with: |         with: | ||||||
|           cosign-release: "v2.0.2" |           cosign-release: "v2.0.2" | ||||||
|  |  | ||||||
|       - name: Install dirhash and calc hash |       - name: Install dirhash and calc hash | ||||||
|         if: needs.init.outputs.publish == 'true' |         if: needs.init.outputs.publish == 'true' | ||||||
|         run: | |         run: | | ||||||
|           pip3 install dirhash |           pip3 install setuptools dirhash | ||||||
|           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")" |           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")" | ||||||
|           echo "${dir_hash}" > rootfs/supervisor.sha256 |           echo "${dir_hash}" > rootfs/supervisor.sha256 | ||||||
|  |  | ||||||
| @@ -149,7 +149,7 @@ jobs: | |||||||
|  |  | ||||||
|       - name: Login to GitHub Container Registry |       - name: Login to GitHub Container Registry | ||||||
|         if: needs.init.outputs.publish == 'true' |         if: needs.init.outputs.publish == 'true' | ||||||
|         uses: docker/login-action@v2.2.0 |         uses: docker/login-action@v3.0.0 | ||||||
|         with: |         with: | ||||||
|           registry: ghcr.io |           registry: ghcr.io | ||||||
|           username: ${{ github.repository_owner }} |           username: ${{ github.repository_owner }} | ||||||
| @@ -160,7 +160,7 @@ jobs: | |||||||
|         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV |         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV | ||||||
|  |  | ||||||
|       - name: Build supervisor |       - name: Build supervisor | ||||||
|         uses: home-assistant/builder@2023.08.0 |         uses: home-assistant/builder@2024.01.0 | ||||||
|         with: |         with: | ||||||
|           args: | |           args: | | ||||||
|             $BUILD_ARGS \ |             $BUILD_ARGS \ | ||||||
| @@ -178,7 +178,7 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - name: Checkout the repository |       - name: Checkout the repository | ||||||
|         if: needs.init.outputs.publish == 'true' |         if: needs.init.outputs.publish == 'true' | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|  |  | ||||||
|       - name: Initialize git |       - name: Initialize git | ||||||
|         if: needs.init.outputs.publish == 'true' |         if: needs.init.outputs.publish == 'true' | ||||||
| @@ -203,11 +203,11 @@ jobs: | |||||||
|     timeout-minutes: 60 |     timeout-minutes: 60 | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout the repository |       - name: Checkout the repository | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|  |  | ||||||
|       - name: Build the Supervisor |       - name: Build the Supervisor | ||||||
|         if: needs.init.outputs.publish != 'true' |         if: needs.init.outputs.publish != 'true' | ||||||
|         uses: home-assistant/builder@2023.08.0 |         uses: home-assistant/builder@2024.01.0 | ||||||
|         with: |         with: | ||||||
|           args: | |           args: | | ||||||
|             --test \ |             --test \ | ||||||
| @@ -324,7 +324,7 @@ jobs: | |||||||
|           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then |           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then | ||||||
|             exit 1 |             exit 1 | ||||||
|           fi |           fi | ||||||
|           echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')" |           echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT" | ||||||
|  |  | ||||||
|       - name: Uninstall SSH add-on |       - name: Uninstall SSH add-on | ||||||
|         run: | |         run: | | ||||||
|   | |||||||
							
								
								
									
										95
									
								
								.github/workflows/ci.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										95
									
								
								.github/workflows/ci.yaml
									
									
									
									
										vendored
									
									
								
							| @@ -8,8 +8,8 @@ on: | |||||||
|   pull_request: ~ |   pull_request: ~ | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   DEFAULT_PYTHON: "3.11" |   DEFAULT_PYTHON: "3.12" | ||||||
|   PRE_COMMIT_HOME: ~/.cache/pre-commit |   PRE_COMMIT_CACHE: ~/.cache/pre-commit | ||||||
|  |  | ||||||
| concurrency: | concurrency: | ||||||
|   group: "${{ github.workflow }}-${{ github.ref }}" |   group: "${{ github.workflow }}-${{ github.ref }}" | ||||||
| @@ -25,15 +25,15 @@ jobs: | |||||||
|     name: Prepare Python dependencies |     name: Prepare Python dependencies | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python |       - name: Set up Python | ||||||
|         id: python |         id: python | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ env.DEFAULT_PYTHON }} |           python-version: ${{ env.DEFAULT_PYTHON }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -47,9 +47,10 @@ jobs: | |||||||
|           pip install -r requirements.txt -r requirements_tests.txt |           pip install -r requirements.txt -r requirements_tests.txt | ||||||
|       - name: Restore pre-commit environment from cache |       - name: Restore pre-commit environment from cache | ||||||
|         id: cache-precommit |         id: cache-precommit | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: ${{ env.PRE_COMMIT_HOME }} |           path: ${{ env.PRE_COMMIT_CACHE }} | ||||||
|  |           lookup-only: true | ||||||
|           key: | |           key: | | ||||||
|             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} |             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} | ||||||
|           restore-keys: | |           restore-keys: | | ||||||
| @@ -66,15 +67,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -87,7 +88,7 @@ jobs: | |||||||
|       - name: Run black |       - name: Run black | ||||||
|         run: | |         run: | | ||||||
|           . venv/bin/activate |           . venv/bin/activate | ||||||
|           black --target-version py38 --check supervisor tests setup.py |           black --target-version py312 --check supervisor tests setup.py | ||||||
|  |  | ||||||
|   lint-dockerfile: |   lint-dockerfile: | ||||||
|     name: Check Dockerfile |     name: Check Dockerfile | ||||||
| @@ -95,7 +96,7 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Register hadolint problem matcher |       - name: Register hadolint problem matcher | ||||||
|         run: | |         run: | | ||||||
|           echo "::add-matcher::.github/workflows/matchers/hadolint.json" |           echo "::add-matcher::.github/workflows/matchers/hadolint.json" | ||||||
| @@ -110,15 +111,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -130,9 +131,9 @@ jobs: | |||||||
|           exit 1 |           exit 1 | ||||||
|       - name: Restore pre-commit environment from cache |       - name: Restore pre-commit environment from cache | ||||||
|         id: cache-precommit |         id: cache-precommit | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: ${{ env.PRE_COMMIT_HOME }} |           path: ${{ env.PRE_COMMIT_CACHE }} | ||||||
|           key: | |           key: | | ||||||
|             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} |             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} | ||||||
|       - name: Fail job if cache restore failed |       - name: Fail job if cache restore failed | ||||||
| @@ -154,15 +155,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -186,15 +187,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -206,9 +207,9 @@ jobs: | |||||||
|           exit 1 |           exit 1 | ||||||
|       - name: Restore pre-commit environment from cache |       - name: Restore pre-commit environment from cache | ||||||
|         id: cache-precommit |         id: cache-precommit | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: ${{ env.PRE_COMMIT_HOME }} |           path: ${{ env.PRE_COMMIT_CACHE }} | ||||||
|           key: | |           key: | | ||||||
|             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} |             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} | ||||||
|       - name: Fail job if cache restore failed |       - name: Fail job if cache restore failed | ||||||
| @@ -227,15 +228,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -247,9 +248,9 @@ jobs: | |||||||
|           exit 1 |           exit 1 | ||||||
|       - name: Restore pre-commit environment from cache |       - name: Restore pre-commit environment from cache | ||||||
|         id: cache-precommit |         id: cache-precommit | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: ${{ env.PRE_COMMIT_HOME }} |           path: ${{ env.PRE_COMMIT_CACHE }} | ||||||
|           key: | |           key: | | ||||||
|             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} |             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} | ||||||
|       - name: Fail job if cache restore failed |       - name: Fail job if cache restore failed | ||||||
| @@ -271,15 +272,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -303,15 +304,15 @@ jobs: | |||||||
|     needs: prepare |     needs: prepare | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -323,9 +324,9 @@ jobs: | |||||||
|           exit 1 |           exit 1 | ||||||
|       - name: Restore pre-commit environment from cache |       - name: Restore pre-commit environment from cache | ||||||
|         id: cache-precommit |         id: cache-precommit | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: ${{ env.PRE_COMMIT_HOME }} |           path: ${{ env.PRE_COMMIT_CACHE }} | ||||||
|           key: | |           key: | | ||||||
|             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} |             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} | ||||||
|       - name: Fail job if cache restore failed |       - name: Fail job if cache restore failed | ||||||
| @@ -344,19 +345,19 @@ jobs: | |||||||
|     name: Run tests Python ${{ needs.prepare.outputs.python-version }} |     name: Run tests Python ${{ needs.prepare.outputs.python-version }} | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Install Cosign |       - name: Install Cosign | ||||||
|         uses: sigstore/cosign-installer@v3.1.2 |         uses: sigstore/cosign-installer@v3.3.0 | ||||||
|         with: |         with: | ||||||
|           cosign-release: "v2.0.2" |           cosign-release: "v2.0.2" | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -391,7 +392,7 @@ jobs: | |||||||
|             -o console_output_style=count \ |             -o console_output_style=count \ | ||||||
|             tests |             tests | ||||||
|       - name: Upload coverage artifact |       - name: Upload coverage artifact | ||||||
|         uses: actions/upload-artifact@v3.1.3 |         uses: actions/upload-artifact@v4.0.0 | ||||||
|         with: |         with: | ||||||
|           name: coverage-${{ matrix.python-version }} |           name: coverage-${{ matrix.python-version }} | ||||||
|           path: .coverage |           path: .coverage | ||||||
| @@ -402,15 +403,15 @@ jobs: | |||||||
|     needs: ["pytest", "prepare"] |     needs: ["pytest", "prepare"] | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Set up Python ${{ needs.prepare.outputs.python-version }} |       - name: Set up Python ${{ needs.prepare.outputs.python-version }} | ||||||
|         uses: actions/setup-python@v4.7.0 |         uses: actions/setup-python@v5.0.0 | ||||||
|         id: python |         id: python | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ needs.prepare.outputs.python-version }} |           python-version: ${{ needs.prepare.outputs.python-version }} | ||||||
|       - name: Restore Python virtual environment |       - name: Restore Python virtual environment | ||||||
|         id: cache-venv |         id: cache-venv | ||||||
|         uses: actions/cache@v3.3.2 |         uses: actions/cache@v3.3.3 | ||||||
|         with: |         with: | ||||||
|           path: venv |           path: venv | ||||||
|           key: | |           key: | | ||||||
| @@ -421,7 +422,7 @@ jobs: | |||||||
|           echo "Failed to restore Python virtual environment from cache" |           echo "Failed to restore Python virtual environment from cache" | ||||||
|           exit 1 |           exit 1 | ||||||
|       - name: Download all coverage artifacts |       - name: Download all coverage artifacts | ||||||
|         uses: actions/download-artifact@v3 |         uses: actions/download-artifact@v4.1.1 | ||||||
|       - name: Combine coverage results |       - name: Combine coverage results | ||||||
|         run: | |         run: | | ||||||
|           . venv/bin/activate |           . venv/bin/activate | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								.github/workflows/lock.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/lock.yml
									
									
									
									
										vendored
									
									
								
							| @@ -9,7 +9,7 @@ jobs: | |||||||
|   lock: |   lock: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: dessant/lock-threads@v4.0.1 |       - uses: dessant/lock-threads@v5.0.1 | ||||||
|         with: |         with: | ||||||
|           github-token: ${{ github.token }} |           github-token: ${{ github.token }} | ||||||
|           issue-inactive-days: "30" |           issue-inactive-days: "30" | ||||||
|   | |||||||
							
								
								
									
										6
									
								
								.github/workflows/release-drafter.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/release-drafter.yml
									
									
									
									
										vendored
									
									
								
							| @@ -11,7 +11,7 @@ jobs: | |||||||
|     name: Release Drafter |     name: Release Drafter | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout the repository |       - name: Checkout the repository | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|         with: |         with: | ||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|  |  | ||||||
| @@ -33,10 +33,10 @@ jobs: | |||||||
|  |  | ||||||
|           echo Current version:    $latest |           echo Current version:    $latest | ||||||
|           echo New target version: $datepre.$newpost |           echo New target version: $datepre.$newpost | ||||||
|           echo "::set-output name=version::$datepre.$newpost" |           echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT" | ||||||
|  |  | ||||||
|       - name: Run Release Drafter |       - name: Run Release Drafter | ||||||
|         uses: release-drafter/release-drafter@v5.24.0 |         uses: release-drafter/release-drafter@v5.25.0 | ||||||
|         with: |         with: | ||||||
|           tag: ${{ steps.version.outputs.version }} |           tag: ${{ steps.version.outputs.version }} | ||||||
|           name: ${{ steps.version.outputs.version }} |           name: ${{ steps.version.outputs.version }} | ||||||
|   | |||||||
							
								
								
									
										4
									
								
								.github/workflows/sentry.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/sentry.yaml
									
									
									
									
										vendored
									
									
								
							| @@ -10,9 +10,9 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - name: Check out code from GitHub |       - name: Check out code from GitHub | ||||||
|         uses: actions/checkout@v4.0.0 |         uses: actions/checkout@v4.1.1 | ||||||
|       - name: Sentry Release |       - name: Sentry Release | ||||||
|         uses: getsentry/action-release@v1.4.1 |         uses: getsentry/action-release@v1.6.0 | ||||||
|         env: |         env: | ||||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} |           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||||
|           SENTRY_ORG: ${{ secrets.SENTRY_ORG }} |           SENTRY_ORG: ${{ secrets.SENTRY_ORG }} | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								.github/workflows/stale.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/stale.yml
									
									
									
									
										vendored
									
									
								
							| @@ -9,7 +9,7 @@ jobs: | |||||||
|   stale: |   stale: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/stale@v8.0.0 |       - uses: actions/stale@v9.0.0 | ||||||
|         with: |         with: | ||||||
|           repo-token: ${{ secrets.GITHUB_TOKEN }} |           repo-token: ${{ secrets.GITHUB_TOKEN }} | ||||||
|           days-before-stale: 30 |           days-before-stale: 30 | ||||||
|   | |||||||
| @@ -3,4 +3,5 @@ ignored: | |||||||
|   - DL3006 |   - DL3006 | ||||||
|   - DL3013 |   - DL3013 | ||||||
|   - DL3018 |   - DL3018 | ||||||
|  |   - DL3042 | ||||||
|   - SC2155 |   - SC2155 | ||||||
|   | |||||||
| @@ -1,16 +1,16 @@ | |||||||
| repos: | repos: | ||||||
|   - repo: https://github.com/psf/black |   - repo: https://github.com/psf/black | ||||||
|     rev: 23.1.0 |     rev: 23.12.1 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: black |       - id: black | ||||||
|         args: |         args: | ||||||
|           - --safe |           - --safe | ||||||
|           - --quiet |           - --quiet | ||||||
|           - --target-version |           - --target-version | ||||||
|           - py310 |           - py312 | ||||||
|         files: ^((supervisor|tests)/.+)?[^/]+\.py$ |         files: ^((supervisor|tests)/.+)?[^/]+\.py$ | ||||||
|   - repo: https://github.com/PyCQA/flake8 |   - repo: https://github.com/PyCQA/flake8 | ||||||
|     rev: 6.0.0 |     rev: 7.0.0 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: flake8 |       - id: flake8 | ||||||
|         additional_dependencies: |         additional_dependencies: | ||||||
| @@ -18,17 +18,17 @@ repos: | |||||||
|           - pydocstyle==6.3.0 |           - pydocstyle==6.3.0 | ||||||
|         files: ^(supervisor|script|tests)/.+\.py$ |         files: ^(supervisor|script|tests)/.+\.py$ | ||||||
|   - repo: https://github.com/pre-commit/pre-commit-hooks |   - repo: https://github.com/pre-commit/pre-commit-hooks | ||||||
|     rev: v4.3.0 |     rev: v4.5.0 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: check-executables-have-shebangs |       - id: check-executables-have-shebangs | ||||||
|         stages: [manual] |         stages: [manual] | ||||||
|       - id: check-json |       - id: check-json | ||||||
|   - repo: https://github.com/PyCQA/isort |   - repo: https://github.com/PyCQA/isort | ||||||
|     rev: 5.12.0 |     rev: 5.13.2 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: isort |       - id: isort | ||||||
|   - repo: https://github.com/asottile/pyupgrade |   - repo: https://github.com/asottile/pyupgrade | ||||||
|     rev: v3.4.0 |     rev: v3.15.0 | ||||||
|     hooks: |     hooks: | ||||||
|       - id: pyupgrade |       - id: pyupgrade | ||||||
|         args: [--py310-plus] |         args: [--py312-plus] | ||||||
|   | |||||||
| @@ -15,6 +15,7 @@ WORKDIR /usr/src | |||||||
| RUN \ | RUN \ | ||||||
|     set -x \ |     set -x \ | ||||||
|     && apk add --no-cache \ |     && apk add --no-cache \ | ||||||
|  |         findutils \ | ||||||
|         eudev \ |         eudev \ | ||||||
|         eudev-libs \ |         eudev-libs \ | ||||||
|         git \ |         git \ | ||||||
| @@ -22,6 +23,7 @@ RUN \ | |||||||
|         libpulse \ |         libpulse \ | ||||||
|         musl \ |         musl \ | ||||||
|         openssl \ |         openssl \ | ||||||
|  |         yaml \ | ||||||
|     \ |     \ | ||||||
|     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \ |     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \ | ||||||
|     && chmod a+x /usr/bin/cosign |     && chmod a+x /usr/bin/cosign | ||||||
| @@ -30,15 +32,14 @@ RUN \ | |||||||
| COPY requirements.txt . | COPY requirements.txt . | ||||||
| RUN \ | RUN \ | ||||||
|     export MAKEFLAGS="-j$(nproc)" \ |     export MAKEFLAGS="-j$(nproc)" \ | ||||||
|     && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \ |     && pip3 install --only-binary=:all: \ | ||||||
|         "https://wheels.home-assistant.io/musllinux/" \ |  | ||||||
|         -r ./requirements.txt \ |         -r ./requirements.txt \ | ||||||
|     && rm -f requirements.txt |     && rm -f requirements.txt | ||||||
|  |  | ||||||
| # Install Home Assistant Supervisor | # Install Home Assistant Supervisor | ||||||
| COPY . supervisor | COPY . supervisor | ||||||
| RUN \ | RUN \ | ||||||
|     pip3 install --no-cache-dir -e ./supervisor \ |     pip3 install -e ./supervisor \ | ||||||
|     && python3 -m compileall ./supervisor/supervisor |     && python3 -m compileall ./supervisor/supervisor | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										10
									
								
								build.yaml
									
									
									
									
									
								
							
							
						
						
									
										10
									
								
								build.yaml
									
									
									
									
									
								
							| @@ -1,10 +1,10 @@ | |||||||
| image: ghcr.io/home-assistant/{arch}-hassio-supervisor | image: ghcr.io/home-assistant/{arch}-hassio-supervisor | ||||||
| build_from: | build_from: | ||||||
|   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16 |   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.18 | ||||||
|   armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16 |   armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.18 | ||||||
|   armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.16 |   armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.18 | ||||||
|   amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.16 |   amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.18 | ||||||
|   i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.16 |   i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.18 | ||||||
| codenotary: | codenotary: | ||||||
|   signer: notary@home-assistant.io |   signer: notary@home-assistant.io | ||||||
|   base_image: notary@home-assistant.io |   base_image: notary@home-assistant.io | ||||||
|   | |||||||
							
								
								
									
										45
									
								
								pylintrc
									
									
									
									
									
								
							
							
						
						
									
										45
									
								
								pylintrc
									
									
									
									
									
								
							| @@ -1,45 +0,0 @@ | |||||||
| [MASTER] |  | ||||||
| reports=no |  | ||||||
| jobs=2 |  | ||||||
|  |  | ||||||
| good-names=id,i,j,k,ex,Run,_,fp,T,os |  | ||||||
|  |  | ||||||
| extension-pkg-whitelist= |  | ||||||
|   ciso8601 |  | ||||||
|  |  | ||||||
| # Reasons disabled: |  | ||||||
| # format - handled by black |  | ||||||
| # locally-disabled - it spams too much |  | ||||||
| # duplicate-code - unavoidable |  | ||||||
| # cyclic-import - doesn't test if both import on load |  | ||||||
| # abstract-class-not-used - is flaky, should not show up but does |  | ||||||
| # unused-argument - generic callbacks and setup methods create a lot of warnings |  | ||||||
| # too-many-* - are not enforced for the sake of readability |  | ||||||
| # too-few-* - same as too-many-* |  | ||||||
| # abstract-method - with intro of async there are always methods missing |  | ||||||
| disable= |  | ||||||
|   format, |  | ||||||
|   abstract-method, |  | ||||||
|   cyclic-import, |  | ||||||
|   duplicate-code, |  | ||||||
|   locally-disabled, |  | ||||||
|   no-else-return, |  | ||||||
|   not-context-manager, |  | ||||||
|   too-few-public-methods, |  | ||||||
|   too-many-arguments, |  | ||||||
|   too-many-branches, |  | ||||||
|   too-many-instance-attributes, |  | ||||||
|   too-many-lines, |  | ||||||
|   too-many-locals, |  | ||||||
|   too-many-public-methods, |  | ||||||
|   too-many-return-statements, |  | ||||||
|   too-many-statements, |  | ||||||
|   unused-argument, |  | ||||||
|   consider-using-with |  | ||||||
|  |  | ||||||
| [EXCEPTIONS] |  | ||||||
| overgeneral-exceptions=builtins.Exception |  | ||||||
|  |  | ||||||
|  |  | ||||||
| [TYPECHECK] |  | ||||||
| ignored-modules = distutils |  | ||||||
							
								
								
									
										112
									
								
								pyproject.toml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										112
									
								
								pyproject.toml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,112 @@ | |||||||
|  | [build-system] | ||||||
|  | requires = ["setuptools~=68.0.0", "wheel~=0.40.0"] | ||||||
|  | build-backend = "setuptools.build_meta" | ||||||
|  |  | ||||||
|  | [project] | ||||||
|  | name = "Supervisor" | ||||||
|  | dynamic = ["version", "dependencies"] | ||||||
|  | license = { text = "Apache-2.0" } | ||||||
|  | description = "Open-source private cloud os for Home-Assistant based on HassOS" | ||||||
|  | readme = "README.md" | ||||||
|  | authors = [ | ||||||
|  |     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" }, | ||||||
|  | ] | ||||||
|  | keywords = ["docker", "home-assistant", "api"] | ||||||
|  | requires-python = ">=3.12.0" | ||||||
|  |  | ||||||
|  | [project.urls] | ||||||
|  | "Homepage" = "https://www.home-assistant.io/" | ||||||
|  | "Source Code" = "https://github.com/home-assistant/supervisor" | ||||||
|  | "Bug Reports" = "https://github.com/home-assistant/supervisor/issues" | ||||||
|  | "Docs: Dev" = "https://developers.home-assistant.io/" | ||||||
|  | "Discord" = "https://www.home-assistant.io/join-chat/" | ||||||
|  | "Forum" = "https://community.home-assistant.io/" | ||||||
|  |  | ||||||
|  | [tool.setuptools] | ||||||
|  | platforms = ["any"] | ||||||
|  | zip-safe = false | ||||||
|  | include-package-data = true | ||||||
|  |  | ||||||
|  | [tool.setuptools.packages.find] | ||||||
|  | include = ["supervisor*"] | ||||||
|  |  | ||||||
|  | [tool.pylint.MAIN] | ||||||
|  | py-version = "3.11" | ||||||
|  | # Use a conservative default here; 2 should speed up most setups and not hurt | ||||||
|  | # any too bad. Override on command line as appropriate. | ||||||
|  | jobs = 2 | ||||||
|  | persistent = false | ||||||
|  | extension-pkg-allow-list = ["ciso8601"] | ||||||
|  |  | ||||||
|  | [tool.pylint.BASIC] | ||||||
|  | class-const-naming-style = "any" | ||||||
|  | good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"] | ||||||
|  |  | ||||||
|  | [tool.pylint."MESSAGES CONTROL"] | ||||||
|  | # Reasons disabled: | ||||||
|  | # format - handled by black | ||||||
|  | # abstract-method - with intro of async there are always methods missing | ||||||
|  | # cyclic-import - doesn't test if both import on load | ||||||
|  | # duplicate-code - unavoidable | ||||||
|  | # locally-disabled - it spams too much | ||||||
|  | # too-many-* - are not enforced for the sake of readability | ||||||
|  | # too-few-* - same as too-many-* | ||||||
|  | # unused-argument - generic callbacks and setup methods create a lot of warnings | ||||||
|  | disable = [ | ||||||
|  |     "format", | ||||||
|  |     "abstract-method", | ||||||
|  |     "cyclic-import", | ||||||
|  |     "duplicate-code", | ||||||
|  |     "locally-disabled", | ||||||
|  |     "no-else-return", | ||||||
|  |     "not-context-manager", | ||||||
|  |     "too-few-public-methods", | ||||||
|  |     "too-many-arguments", | ||||||
|  |     "too-many-branches", | ||||||
|  |     "too-many-instance-attributes", | ||||||
|  |     "too-many-lines", | ||||||
|  |     "too-many-locals", | ||||||
|  |     "too-many-public-methods", | ||||||
|  |     "too-many-return-statements", | ||||||
|  |     "too-many-statements", | ||||||
|  |     "unused-argument", | ||||||
|  |     "consider-using-with", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | [tool.pylint.REPORTS] | ||||||
|  | score = false | ||||||
|  |  | ||||||
|  | [tool.pylint.TYPECHECK] | ||||||
|  | ignored-modules = ["distutils"] | ||||||
|  |  | ||||||
|  | [tool.pylint.FORMAT] | ||||||
|  | expected-line-ending-format = "LF" | ||||||
|  |  | ||||||
|  | [tool.pylint.EXCEPTIONS] | ||||||
|  | overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] | ||||||
|  |  | ||||||
|  | [tool.pytest.ini_options] | ||||||
|  | testpaths = ["tests"] | ||||||
|  | norecursedirs = [".git"] | ||||||
|  | log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" | ||||||
|  | log_date_format = "%Y-%m-%d %H:%M:%S" | ||||||
|  | asyncio_mode = "auto" | ||||||
|  | filterwarnings = [ | ||||||
|  |     "error", | ||||||
|  |     "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash", | ||||||
|  |     "ignore::pytest.PytestUnraisableExceptionWarning", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | [tool.isort] | ||||||
|  | multi_line_output = 3 | ||||||
|  | include_trailing_comma = true | ||||||
|  | force_grid_wrap = 0 | ||||||
|  | line_length = 88 | ||||||
|  | indent = "    " | ||||||
|  | force_sort_within_sections = true | ||||||
|  | sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] | ||||||
|  | default_section = "THIRDPARTY" | ||||||
|  | forced_separate = "tests" | ||||||
|  | combine_as_imports = true | ||||||
|  | use_parentheses = true | ||||||
|  | known_first_party = ["supervisor", "tests"] | ||||||
| @@ -1,2 +0,0 @@ | |||||||
| [pytest] |  | ||||||
| asyncio_mode = auto |  | ||||||
| @@ -1,26 +1,30 @@ | |||||||
| aiodns==3.0.0 | aiodns==3.1.1 | ||||||
| aiohttp==3.8.5 | aiohttp==3.9.1 | ||||||
|  | aiohttp-fast-url-dispatcher==0.3.0 | ||||||
| async_timeout==4.0.3 | async_timeout==4.0.3 | ||||||
| atomicwrites-homeassistant==1.4.1 | atomicwrites-homeassistant==1.4.1 | ||||||
| attrs==23.1.0 | attrs==23.2.0 | ||||||
| awesomeversion==23.8.0 | awesomeversion==23.11.0 | ||||||
| brotli==1.1.0 | brotli==1.1.0 | ||||||
| ciso8601==2.3.0 | ciso8601==2.3.1 | ||||||
| colorlog==6.7.0 | colorlog==6.8.0 | ||||||
| cpe==1.2.1 | cpe==1.2.1 | ||||||
| cryptography==41.0.3 | cryptography==41.0.7 | ||||||
| debugpy==1.7.0 | debugpy==1.8.0 | ||||||
| deepmerge==1.1.0 | deepmerge==1.1.1 | ||||||
| dirhash==0.2.1 | dirhash==0.2.1 | ||||||
| docker==6.1.3 | docker==7.0.0 | ||||||
| faust-cchardet==2.1.19 | faust-cchardet==2.1.19 | ||||||
| gitpython==3.1.36 | gitpython==3.1.41 | ||||||
| jinja2==3.1.2 | jinja2==3.1.3 | ||||||
|  | orjson==3.9.10 | ||||||
| pulsectl==23.5.2 | pulsectl==23.5.2 | ||||||
| pyudev==0.24.1 | pyudev==0.24.1 | ||||||
| ruamel.yaml==0.17.21 | PyYAML==6.0.1 | ||||||
| securetar==2023.3.0 | securetar==2023.12.0 | ||||||
| sentry-sdk==1.30.0 | sentry-sdk==1.39.2 | ||||||
| voluptuous==0.13.1 | setuptools==69.0.3 | ||||||
| dbus-fast==2.2.0 | voluptuous==0.14.1 | ||||||
| typing_extensions==4.7.1 | dbus-fast==2.21.0 | ||||||
|  | typing_extensions==4.9.0 | ||||||
|  | zlib-fast==0.1.0 | ||||||
|   | |||||||
| @@ -1,16 +1,16 @@ | |||||||
| black==23.9.1 | black==23.12.1 | ||||||
| coverage==7.3.1 | coverage==7.4.0 | ||||||
| flake8-docstrings==1.7.0 | flake8-docstrings==1.7.0 | ||||||
| flake8==6.1.0 | flake8==7.0.0 | ||||||
| pre-commit==3.4.0 | pre-commit==3.6.0 | ||||||
| pydocstyle==6.3.0 | pydocstyle==6.3.0 | ||||||
| pylint==2.17.5 | pylint==3.0.3 | ||||||
| pytest-aiohttp==1.0.5 | pytest-aiohttp==1.0.5 | ||||||
| pytest-asyncio==0.18.3 | pytest-asyncio==0.23.3 | ||||||
| pytest-cov==4.1.0 | pytest-cov==4.1.0 | ||||||
| pytest-timeout==2.1.0 | pytest-timeout==2.2.0 | ||||||
| pytest==7.4.2 | pytest==7.4.4 | ||||||
| pyupgrade==3.10.1 | pyupgrade==3.15.0 | ||||||
| time-machine==2.12.0 | time-machine==2.13.0 | ||||||
| typing_extensions==4.7.1 | typing_extensions==4.9.0 | ||||||
| urllib3==2.0.4 | urllib3==2.1.0 | ||||||
|   | |||||||
| @@ -15,7 +15,7 @@ do | |||||||
|     if [[ "${supervisor_state}" = "running"  ]]; then |     if [[ "${supervisor_state}" = "running"  ]]; then | ||||||
|  |  | ||||||
|         # Check API |         # Check API | ||||||
|         if bashio::supervisor.ping; then |         if bashio::supervisor.ping > /dev/null; then | ||||||
|             failed_count=0 |             failed_count=0 | ||||||
|         else |         else | ||||||
|             bashio::log.warning "Maybe found an issue on API healthy" |             bashio::log.warning "Maybe found an issue on API healthy" | ||||||
|   | |||||||
							
								
								
									
										14
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										14
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,17 +1,3 @@ | |||||||
| [isort] |  | ||||||
| multi_line_output = 3 |  | ||||||
| include_trailing_comma=True |  | ||||||
| force_grid_wrap=0 |  | ||||||
| line_length=88 |  | ||||||
| indent = "    " |  | ||||||
| force_sort_within_sections = true |  | ||||||
| sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER |  | ||||||
| default_section = THIRDPARTY |  | ||||||
| forced_separate = tests |  | ||||||
| combine_as_imports = true |  | ||||||
| use_parentheses = true |  | ||||||
| known_first_party = supervisor,tests |  | ||||||
|  |  | ||||||
| [flake8] | [flake8] | ||||||
| exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build | exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build | ||||||
| doctests = True | doctests = True | ||||||
|   | |||||||
							
								
								
									
										75
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										75
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,60 +1,27 @@ | |||||||
| """Home Assistant Supervisor setup.""" | """Home Assistant Supervisor setup.""" | ||||||
|  | from pathlib import Path | ||||||
|  | import re | ||||||
|  |  | ||||||
| from setuptools import setup | from setuptools import setup | ||||||
|  |  | ||||||
| from supervisor.const import SUPERVISOR_VERSION | RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$") | ||||||
|  |  | ||||||
|  | SUPERVISOR_DIR = Path(__file__).parent | ||||||
|  | REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt" | ||||||
|  | CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py" | ||||||
|  |  | ||||||
|  | REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8") | ||||||
|  | CONSTANTS = CONST_FILE.read_text(encoding="utf-8") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _get_supervisor_version(): | ||||||
|  |     for line in CONSTANTS.split("/n"): | ||||||
|  |         if match := RE_SUPERVISOR_VERSION.match(line): | ||||||
|  |             return match.group(1) | ||||||
|  |     return "99.9.9dev" | ||||||
|  |  | ||||||
|  |  | ||||||
| setup( | setup( | ||||||
|     name="Supervisor", |     version=_get_supervisor_version(), | ||||||
|     version=SUPERVISOR_VERSION, |     dependencies=REQUIREMENTS.split("/n"), | ||||||
|     license="BSD License", |  | ||||||
|     author="The Home Assistant Authors", |  | ||||||
|     author_email="hello@home-assistant.io", |  | ||||||
|     url="https://home-assistant.io/", |  | ||||||
|     description=("Open-source private cloud os for Home-Assistant" " based on HassOS"), |  | ||||||
|     long_description=( |  | ||||||
|         "A maintainless private cloud operator system that" |  | ||||||
|         "setup a Home-Assistant instance. Based on HassOS" |  | ||||||
|     ), |  | ||||||
|     classifiers=[ |  | ||||||
|         "Intended Audience :: End Users/Desktop", |  | ||||||
|         "Intended Audience :: Developers", |  | ||||||
|         "License :: OSI Approved :: Apache Software License", |  | ||||||
|         "Operating System :: OS Independent", |  | ||||||
|         "Topic :: Home Automation", |  | ||||||
|         "Topic :: Software Development :: Libraries :: Python Modules", |  | ||||||
|         "Topic :: Scientific/Engineering :: Atmospheric Science", |  | ||||||
|         "Development Status :: 5 - Production/Stable", |  | ||||||
|         "Intended Audience :: Developers", |  | ||||||
|         "Programming Language :: Python :: 3.8", |  | ||||||
|     ], |  | ||||||
|     keywords=["docker", "home-assistant", "api"], |  | ||||||
|     zip_safe=False, |  | ||||||
|     platforms="any", |  | ||||||
|     packages=[ |  | ||||||
|         "supervisor.addons", |  | ||||||
|         "supervisor.api", |  | ||||||
|         "supervisor.backups", |  | ||||||
|         "supervisor.dbus.network", |  | ||||||
|         "supervisor.dbus.network.setting", |  | ||||||
|         "supervisor.dbus", |  | ||||||
|         "supervisor.discovery.services", |  | ||||||
|         "supervisor.discovery", |  | ||||||
|         "supervisor.docker", |  | ||||||
|         "supervisor.homeassistant", |  | ||||||
|         "supervisor.host", |  | ||||||
|         "supervisor.jobs", |  | ||||||
|         "supervisor.misc", |  | ||||||
|         "supervisor.plugins", |  | ||||||
|         "supervisor.resolution.checks", |  | ||||||
|         "supervisor.resolution.evaluations", |  | ||||||
|         "supervisor.resolution.fixups", |  | ||||||
|         "supervisor.resolution", |  | ||||||
|         "supervisor.security", |  | ||||||
|         "supervisor.services.modules", |  | ||||||
|         "supervisor.services", |  | ||||||
|         "supervisor.store", |  | ||||||
|         "supervisor.utils", |  | ||||||
|         "supervisor", |  | ||||||
|     ], |  | ||||||
|     include_package_data=True, |  | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -5,7 +5,13 @@ import logging | |||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import sys | import sys | ||||||
|  |  | ||||||
| from supervisor import bootstrap | import zlib_fast | ||||||
|  |  | ||||||
|  | # Enable fast zlib before importing supervisor | ||||||
|  | zlib_fast.enable() | ||||||
|  |  | ||||||
|  | from supervisor import bootstrap  # noqa: E402 | ||||||
|  | from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402 | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| @@ -38,6 +44,8 @@ if __name__ == "__main__": | |||||||
|     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker") |     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker") | ||||||
|     loop.set_default_executor(executor) |     loop.set_default_executor(executor) | ||||||
|  |  | ||||||
|  |     activate_log_queue_handler() | ||||||
|  |  | ||||||
|     _LOGGER.info("Initializing Supervisor setup") |     _LOGGER.info("Initializing Supervisor setup") | ||||||
|     coresys = loop.run_until_complete(bootstrap.initialize_coresys()) |     coresys = loop.run_until_complete(bootstrap.initialize_coresys()) | ||||||
|     loop.set_debug(coresys.config.debug) |     loop.set_debug(coresys.config.debug) | ||||||
|   | |||||||
| @@ -1,477 +1 @@ | |||||||
| """Init file for Supervisor add-ons.""" | """Init file for Supervisor add-ons.""" | ||||||
| import asyncio |  | ||||||
| from collections.abc import Awaitable |  | ||||||
| from contextlib import suppress |  | ||||||
| import logging |  | ||||||
| import tarfile |  | ||||||
| from typing import Union |  | ||||||
|  |  | ||||||
| from ..const import AddonBoot, AddonStartup, AddonState |  | ||||||
| from ..coresys import CoreSys, CoreSysAttributes |  | ||||||
| from ..exceptions import ( |  | ||||||
|     AddonConfigurationError, |  | ||||||
|     AddonsError, |  | ||||||
|     AddonsJobError, |  | ||||||
|     AddonsNotSupportedError, |  | ||||||
|     CoreDNSError, |  | ||||||
|     DockerAPIError, |  | ||||||
|     DockerError, |  | ||||||
|     DockerNotFound, |  | ||||||
|     HomeAssistantAPIError, |  | ||||||
|     HostAppArmorError, |  | ||||||
| ) |  | ||||||
| from ..jobs.decorator import Job, JobCondition |  | ||||||
| from ..resolution.const import ContextType, IssueType, SuggestionType |  | ||||||
| from ..store.addon import AddonStore |  | ||||||
| from ..utils import check_exception_chain |  | ||||||
| from ..utils.sentry import capture_exception |  | ||||||
| from .addon import Addon |  | ||||||
| from .const import ADDON_UPDATE_CONDITIONS |  | ||||||
| from .data import AddonsData |  | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
| AnyAddon = Union[Addon, AddonStore] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AddonManager(CoreSysAttributes): |  | ||||||
|     """Manage add-ons inside Supervisor.""" |  | ||||||
|  |  | ||||||
|     def __init__(self, coresys: CoreSys): |  | ||||||
|         """Initialize Docker base wrapper.""" |  | ||||||
|         self.coresys: CoreSys = coresys |  | ||||||
|         self.data: AddonsData = AddonsData(coresys) |  | ||||||
|         self.local: dict[str, Addon] = {} |  | ||||||
|         self.store: dict[str, AddonStore] = {} |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def all(self) -> list[AnyAddon]: |  | ||||||
|         """Return a list of all add-ons.""" |  | ||||||
|         addons: dict[str, AnyAddon] = {**self.store, **self.local} |  | ||||||
|         return list(addons.values()) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def installed(self) -> list[Addon]: |  | ||||||
|         """Return a list of all installed add-ons.""" |  | ||||||
|         return list(self.local.values()) |  | ||||||
|  |  | ||||||
|     def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: |  | ||||||
|         """Return an add-on from slug. |  | ||||||
|  |  | ||||||
|         Prio: |  | ||||||
|           1 - Local |  | ||||||
|           2 - Store |  | ||||||
|         """ |  | ||||||
|         if addon_slug in self.local: |  | ||||||
|             return self.local[addon_slug] |  | ||||||
|         if not local_only: |  | ||||||
|             return self.store.get(addon_slug) |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def from_token(self, token: str) -> Addon | None: |  | ||||||
|         """Return an add-on from Supervisor token.""" |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if token == addon.supervisor_token: |  | ||||||
|                 return addon |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     async def load(self) -> None: |  | ||||||
|         """Start up add-on management.""" |  | ||||||
|         tasks = [] |  | ||||||
|         for slug in self.data.system: |  | ||||||
|             addon = self.local[slug] = Addon(self.coresys, slug) |  | ||||||
|             tasks.append(self.sys_create_task(addon.load())) |  | ||||||
|  |  | ||||||
|         # Run initial tasks |  | ||||||
|         _LOGGER.info("Found %d installed add-ons", len(tasks)) |  | ||||||
|         if tasks: |  | ||||||
|             await asyncio.wait(tasks) |  | ||||||
|  |  | ||||||
|         # Sync DNS |  | ||||||
|         await self.sync_dns() |  | ||||||
|  |  | ||||||
|     async def boot(self, stage: AddonStartup) -> None: |  | ||||||
|         """Boot add-ons with mode auto.""" |  | ||||||
|         tasks: list[Addon] = [] |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if addon.boot != AddonBoot.AUTO or addon.startup != stage: |  | ||||||
|                 continue |  | ||||||
|             tasks.append(addon) |  | ||||||
|  |  | ||||||
|         # Evaluate add-ons which need to be started |  | ||||||
|         _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks)) |  | ||||||
|         if not tasks: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Start Add-ons sequential |  | ||||||
|         # avoid issue on slow IO |  | ||||||
|         # Config.wait_boot is deprecated. Until addons update with healthchecks, |  | ||||||
|         # add a sleep task for it to keep the same minimum amount of wait time |  | ||||||
|         wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] |  | ||||||
|         for addon in tasks: |  | ||||||
|             try: |  | ||||||
|                 if start_task := await addon.start(): |  | ||||||
|                     wait_boot.append(start_task) |  | ||||||
|             except AddonsError as err: |  | ||||||
|                 # Check if there is an system/user issue |  | ||||||
|                 if check_exception_chain( |  | ||||||
|                     err, (DockerAPIError, DockerNotFound, AddonConfigurationError) |  | ||||||
|                 ): |  | ||||||
|                     addon.boot = AddonBoot.MANUAL |  | ||||||
|                     addon.save_persist() |  | ||||||
|             except Exception as err:  # pylint: disable=broad-except |  | ||||||
|                 capture_exception(err) |  | ||||||
|             else: |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             _LOGGER.warning("Can't start Add-on %s", addon.slug) |  | ||||||
|  |  | ||||||
|         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere |  | ||||||
|         await asyncio.gather(*wait_boot, return_exceptions=True) |  | ||||||
|  |  | ||||||
|     async def shutdown(self, stage: AddonStartup) -> None: |  | ||||||
|         """Shutdown addons.""" |  | ||||||
|         tasks: list[Addon] = [] |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if addon.state != AddonState.STARTED or addon.startup != stage: |  | ||||||
|                 continue |  | ||||||
|             tasks.append(addon) |  | ||||||
|  |  | ||||||
|         # Evaluate add-ons which need to be stopped |  | ||||||
|         _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks)) |  | ||||||
|         if not tasks: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Stop Add-ons sequential |  | ||||||
|         # avoid issue on slow IO |  | ||||||
|         for addon in tasks: |  | ||||||
|             try: |  | ||||||
|                 await addon.stop() |  | ||||||
|             except Exception as err:  # pylint: disable=broad-except |  | ||||||
|                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) |  | ||||||
|                 capture_exception(err) |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_install", |  | ||||||
|         conditions=ADDON_UPDATE_CONDITIONS, |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def install(self, slug: str) -> None: |  | ||||||
|         """Install an add-on.""" |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug in self.local: |  | ||||||
|             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning) |  | ||||||
|         store = self.store.get(slug) |  | ||||||
|  |  | ||||||
|         if not store: |  | ||||||
|             raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error) |  | ||||||
|  |  | ||||||
|         store.validate_availability() |  | ||||||
|  |  | ||||||
|         self.data.install(store) |  | ||||||
|         addon = Addon(self.coresys, slug) |  | ||||||
|         await addon.load() |  | ||||||
|  |  | ||||||
|         if not addon.path_data.is_dir(): |  | ||||||
|             _LOGGER.info( |  | ||||||
|                 "Creating Home Assistant add-on data folder %s", addon.path_data |  | ||||||
|             ) |  | ||||||
|             addon.path_data.mkdir() |  | ||||||
|  |  | ||||||
|         # Setup/Fix AppArmor profile |  | ||||||
|         await addon.install_apparmor() |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             await addon.instance.install(store.version, store.image, arch=addon.arch) |  | ||||||
|         except DockerError as err: |  | ||||||
|             self.data.uninstall(addon) |  | ||||||
|             raise AddonsError() from err |  | ||||||
|  |  | ||||||
|         self.local[slug] = addon |  | ||||||
|  |  | ||||||
|         # Reload ingress tokens |  | ||||||
|         if addon.with_ingress: |  | ||||||
|             await self.sys_ingress.reload() |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Add-on '%s' successfully installed", slug) |  | ||||||
|  |  | ||||||
|     async def uninstall(self, slug: str) -> None: |  | ||||||
|         """Remove an add-on.""" |  | ||||||
|         if slug not in self.local: |  | ||||||
|             _LOGGER.warning("Add-on %s is not installed", slug) |  | ||||||
|             return |  | ||||||
|         addon = self.local[slug] |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             await addon.instance.remove() |  | ||||||
|         except DockerError as err: |  | ||||||
|             raise AddonsError() from err |  | ||||||
|  |  | ||||||
|         addon.state = AddonState.UNKNOWN |  | ||||||
|  |  | ||||||
|         await addon.unload() |  | ||||||
|  |  | ||||||
|         # Cleanup audio settings |  | ||||||
|         if addon.path_pulse.exists(): |  | ||||||
|             with suppress(OSError): |  | ||||||
|                 addon.path_pulse.unlink() |  | ||||||
|  |  | ||||||
|         # Cleanup AppArmor profile |  | ||||||
|         with suppress(HostAppArmorError): |  | ||||||
|             await addon.uninstall_apparmor() |  | ||||||
|  |  | ||||||
|         # Cleanup Ingress panel from sidebar |  | ||||||
|         if addon.ingress_panel: |  | ||||||
|             addon.ingress_panel = False |  | ||||||
|             with suppress(HomeAssistantAPIError): |  | ||||||
|                 await self.sys_ingress.update_hass_panel(addon) |  | ||||||
|  |  | ||||||
|         # Cleanup Ingress dynamic port assignment |  | ||||||
|         if addon.with_ingress: |  | ||||||
|             self.sys_create_task(self.sys_ingress.reload()) |  | ||||||
|             self.sys_ingress.del_dynamic_port(slug) |  | ||||||
|  |  | ||||||
|         # Cleanup discovery data |  | ||||||
|         for message in self.sys_discovery.list_messages: |  | ||||||
|             if message.addon != addon.slug: |  | ||||||
|                 continue |  | ||||||
|             self.sys_discovery.remove(message) |  | ||||||
|  |  | ||||||
|         # Cleanup services data |  | ||||||
|         for service in self.sys_services.list_services: |  | ||||||
|             if addon.slug not in service.active: |  | ||||||
|                 continue |  | ||||||
|             service.del_service_data(addon) |  | ||||||
|  |  | ||||||
|         self.data.uninstall(addon) |  | ||||||
|         self.local.pop(slug) |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Add-on '%s' successfully removed", slug) |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_update", |  | ||||||
|         conditions=ADDON_UPDATE_CONDITIONS, |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def update( |  | ||||||
|         self, slug: str, backup: bool | None = False |  | ||||||
|     ) -> Awaitable[None] | None: |  | ||||||
|         """Update add-on. |  | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see addon.start) |  | ||||||
|         if addon is started after update. Else nothing is returned. |  | ||||||
|         """ |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug not in self.local: |  | ||||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) |  | ||||||
|         addon = self.local[slug] |  | ||||||
|  |  | ||||||
|         if addon.is_detached: |  | ||||||
|             raise AddonsError( |  | ||||||
|                 f"Add-on {slug} is not available inside store", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|         store = self.store[slug] |  | ||||||
|  |  | ||||||
|         if addon.version == store.version: |  | ||||||
|             raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning) |  | ||||||
|  |  | ||||||
|         # Check if available, Maybe something have changed |  | ||||||
|         store.validate_availability() |  | ||||||
|  |  | ||||||
|         if backup: |  | ||||||
|             await self.sys_backups.do_backup_partial( |  | ||||||
|                 name=f"addon_{addon.slug}_{addon.version}", |  | ||||||
|                 homeassistant=False, |  | ||||||
|                 addons=[addon.slug], |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # Update instance |  | ||||||
|         last_state: AddonState = addon.state |  | ||||||
|         old_image = addon.image |  | ||||||
|         try: |  | ||||||
|             await addon.instance.update(store.version, store.image) |  | ||||||
|         except DockerError as err: |  | ||||||
|             raise AddonsError() from err |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Add-on '%s' successfully updated", slug) |  | ||||||
|         self.data.update(store) |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         with suppress(DockerError): |  | ||||||
|             await addon.instance.cleanup(old_image=old_image) |  | ||||||
|  |  | ||||||
|         # Setup/Fix AppArmor profile |  | ||||||
|         await addon.install_apparmor() |  | ||||||
|  |  | ||||||
|         # restore state |  | ||||||
|         return ( |  | ||||||
|             await addon.start() |  | ||||||
|             if last_state in [AddonState.STARTED, AddonState.STARTUP] |  | ||||||
|             else None |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_rebuild", |  | ||||||
|         conditions=[ |  | ||||||
|             JobCondition.FREE_SPACE, |  | ||||||
|             JobCondition.INTERNET_HOST, |  | ||||||
|             JobCondition.HEALTHY, |  | ||||||
|         ], |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def rebuild(self, slug: str) -> Awaitable[None] | None: |  | ||||||
|         """Perform a rebuild of local build add-on. |  | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see addon.start) |  | ||||||
|         if addon is started after rebuild. Else nothing is returned. |  | ||||||
|         """ |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug not in self.local: |  | ||||||
|             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) |  | ||||||
|         addon = self.local[slug] |  | ||||||
|  |  | ||||||
|         if addon.is_detached: |  | ||||||
|             raise AddonsError( |  | ||||||
|                 f"Add-on {slug} is not available inside store", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|         store = self.store[slug] |  | ||||||
|  |  | ||||||
|         # Check if a rebuild is possible now |  | ||||||
|         if addon.version != store.version: |  | ||||||
|             raise AddonsError( |  | ||||||
|                 "Version changed, use Update instead Rebuild", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|         if not addon.need_build: |  | ||||||
|             raise AddonsNotSupportedError( |  | ||||||
|                 "Can't rebuild a image based add-on", _LOGGER.error |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # remove docker container but not addon config |  | ||||||
|         last_state: AddonState = addon.state |  | ||||||
|         try: |  | ||||||
|             await addon.instance.remove() |  | ||||||
|             await addon.instance.install(addon.version) |  | ||||||
|         except DockerError as err: |  | ||||||
|             raise AddonsError() from err |  | ||||||
|  |  | ||||||
|         self.data.update(store) |  | ||||||
|         _LOGGER.info("Add-on '%s' successfully rebuilt", slug) |  | ||||||
|  |  | ||||||
|         # restore state |  | ||||||
|         return ( |  | ||||||
|             await addon.start() |  | ||||||
|             if last_state in [AddonState.STARTED, AddonState.STARTUP] |  | ||||||
|             else None |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_restore", |  | ||||||
|         conditions=[ |  | ||||||
|             JobCondition.FREE_SPACE, |  | ||||||
|             JobCondition.INTERNET_HOST, |  | ||||||
|             JobCondition.HEALTHY, |  | ||||||
|         ], |  | ||||||
|         on_condition=AddonsJobError, |  | ||||||
|     ) |  | ||||||
|     async def restore( |  | ||||||
|         self, slug: str, tar_file: tarfile.TarFile |  | ||||||
|     ) -> Awaitable[None] | None: |  | ||||||
|         """Restore state of an add-on. |  | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see addon.start) |  | ||||||
|         if addon is started after restore. Else nothing is returned. |  | ||||||
|         """ |  | ||||||
|         self.sys_jobs.current.reference = slug |  | ||||||
|  |  | ||||||
|         if slug not in self.local: |  | ||||||
|             _LOGGER.debug("Add-on %s is not local available for restore", slug) |  | ||||||
|             addon = Addon(self.coresys, slug) |  | ||||||
|         else: |  | ||||||
|             _LOGGER.debug("Add-on %s is local available for restore", slug) |  | ||||||
|             addon = self.local[slug] |  | ||||||
|  |  | ||||||
|         wait_for_start = await addon.restore(tar_file) |  | ||||||
|  |  | ||||||
|         # Check if new |  | ||||||
|         if slug not in self.local: |  | ||||||
|             _LOGGER.info("Detect new Add-on after restore %s", slug) |  | ||||||
|             self.local[slug] = addon |  | ||||||
|  |  | ||||||
|         # Update ingress |  | ||||||
|         if addon.with_ingress: |  | ||||||
|             await self.sys_ingress.reload() |  | ||||||
|             with suppress(HomeAssistantAPIError): |  | ||||||
|                 await self.sys_ingress.update_hass_panel(addon) |  | ||||||
|  |  | ||||||
|         return wait_for_start |  | ||||||
|  |  | ||||||
|     @Job( |  | ||||||
|         name="addon_manager_repair", |  | ||||||
|         conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST], |  | ||||||
|     ) |  | ||||||
|     async def repair(self) -> None: |  | ||||||
|         """Repair local add-ons.""" |  | ||||||
|         needs_repair: list[Addon] = [] |  | ||||||
|  |  | ||||||
|         # Evaluate Add-ons to repair |  | ||||||
|         for addon in self.installed: |  | ||||||
|             if await addon.instance.exists(): |  | ||||||
|                 continue |  | ||||||
|             needs_repair.append(addon) |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Found %d add-ons to repair", len(needs_repair)) |  | ||||||
|         if not needs_repair: |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         for addon in needs_repair: |  | ||||||
|             _LOGGER.info("Repairing for add-on: %s", addon.slug) |  | ||||||
|             with suppress(DockerError, KeyError): |  | ||||||
|                 # Need pull a image again |  | ||||||
|                 if not addon.need_build: |  | ||||||
|                     await addon.instance.install(addon.version, addon.image) |  | ||||||
|                     continue |  | ||||||
|  |  | ||||||
|                 # Need local lookup |  | ||||||
|                 if addon.need_build and not addon.is_detached: |  | ||||||
|                     store = self.store[addon.slug] |  | ||||||
|                     # If this add-on is available for rebuild |  | ||||||
|                     if addon.version == store.version: |  | ||||||
|                         await addon.instance.install(addon.version, addon.image) |  | ||||||
|                         continue |  | ||||||
|  |  | ||||||
|             _LOGGER.error("Can't repair %s", addon.slug) |  | ||||||
|             with suppress(AddonsError): |  | ||||||
|                 await self.uninstall(addon.slug) |  | ||||||
|  |  | ||||||
|     async def sync_dns(self) -> None: |  | ||||||
|         """Sync add-ons DNS names.""" |  | ||||||
|         # Update hosts |  | ||||||
|         add_host_coros: list[Awaitable[None]] = [] |  | ||||||
|         for addon in self.installed: |  | ||||||
|             try: |  | ||||||
|                 if not await addon.instance.is_running(): |  | ||||||
|                     continue |  | ||||||
|             except DockerError as err: |  | ||||||
|                 _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err) |  | ||||||
|                 self.sys_resolution.create_issue( |  | ||||||
|                     IssueType.CORRUPT_DOCKER, |  | ||||||
|                     ContextType.ADDON, |  | ||||||
|                     reference=addon.slug, |  | ||||||
|                     suggestions=[SuggestionType.EXECUTE_REPAIR], |  | ||||||
|                 ) |  | ||||||
|                 capture_exception(err) |  | ||||||
|             else: |  | ||||||
|                 add_host_coros.append( |  | ||||||
|                     self.sys_plugins.dns.add_host( |  | ||||||
|                         ipv4=addon.ip_address, names=[addon.hostname], write=False |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         await asyncio.gather(*add_host_coros) |  | ||||||
|  |  | ||||||
|         # Write hosts files |  | ||||||
|         with suppress(CoreDNSError): |  | ||||||
|             await self.sys_plugins.dns.write_hosts() |  | ||||||
|   | |||||||
| @@ -3,6 +3,7 @@ import asyncio | |||||||
| from collections.abc import Awaitable | from collections.abc import Awaitable | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
| from copy import deepcopy | from copy import deepcopy | ||||||
|  | import errno | ||||||
| from ipaddress import IPv4Address | from ipaddress import IPv4Address | ||||||
| import logging | import logging | ||||||
| from pathlib import Path, PurePath | from pathlib import Path, PurePath | ||||||
| @@ -64,12 +65,15 @@ from ..exceptions import ( | |||||||
|     AddonsNotSupportedError, |     AddonsNotSupportedError, | ||||||
|     ConfigurationFileError, |     ConfigurationFileError, | ||||||
|     DockerError, |     DockerError, | ||||||
|  |     HomeAssistantAPIError, | ||||||
|     HostAppArmorError, |     HostAppArmorError, | ||||||
| ) | ) | ||||||
| from ..hardware.data import Device | from ..hardware.data import Device | ||||||
| from ..homeassistant.const import WSEvent, WSType | from ..homeassistant.const import WSEvent, WSType | ||||||
| from ..jobs.const import JobExecutionLimit | from ..jobs.const import JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
|  | from ..store.addon import AddonStore | ||||||
| from ..utils import check_port | from ..utils import check_port | ||||||
| from ..utils.apparmor import adjust_profile | from ..utils.apparmor import adjust_profile | ||||||
| from ..utils.json import read_json_file, write_json_file | from ..utils.json import read_json_file, write_json_file | ||||||
| @@ -80,6 +84,7 @@ from .const import ( | |||||||
|     WATCHDOG_THROTTLE_MAX_CALLS, |     WATCHDOG_THROTTLE_MAX_CALLS, | ||||||
|     WATCHDOG_THROTTLE_PERIOD, |     WATCHDOG_THROTTLE_PERIOD, | ||||||
|     AddonBackupMode, |     AddonBackupMode, | ||||||
|  |     MappingType, | ||||||
| ) | ) | ||||||
| from .model import AddonModel, Data | from .model import AddonModel, Data | ||||||
| from .options import AddonOptions | from .options import AddonOptions | ||||||
| @@ -181,6 +186,7 @@ class Addon(AddonModel): | |||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |         await self._check_ingress_port() | ||||||
|         with suppress(DockerError): |         with suppress(DockerError): | ||||||
|             await self.instance.attach(version=self.version) |             await self.instance.attach(version=self.version) | ||||||
|  |  | ||||||
| @@ -199,6 +205,11 @@ class Addon(AddonModel): | |||||||
|         """Return add-on data from store.""" |         """Return add-on data from store.""" | ||||||
|         return self.sys_store.data.addons.get(self.slug, self.data) |         return self.sys_store.data.addons.get(self.slug, self.data) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def addon_store(self) -> AddonStore | None: | ||||||
|  |         """Return store representation of addon.""" | ||||||
|  |         return self.sys_addons.store.get(self.slug) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def persist(self) -> Data: |     def persist(self) -> Data: | ||||||
|         """Return add-on data/config.""" |         """Return add-on data/config.""" | ||||||
| @@ -387,7 +398,7 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         port = self.data[ATTR_INGRESS_PORT] |         port = self.data[ATTR_INGRESS_PORT] | ||||||
|         if port == 0: |         if port == 0: | ||||||
|             return self.sys_ingress.get_dynamic_port(self.slug) |             raise RuntimeError(f"No port set for add-on {self.slug}") | ||||||
|         return port |         return port | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -453,6 +464,21 @@ class Addon(AddonModel): | |||||||
|         """Return add-on data path external for Docker.""" |         """Return add-on data path external for Docker.""" | ||||||
|         return PurePath(self.sys_config.path_extern_addons_data, self.slug) |         return PurePath(self.sys_config.path_extern_addons_data, self.slug) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def addon_config_used(self) -> bool: | ||||||
|  |         """Add-on is using its public config folder.""" | ||||||
|  |         return MappingType.ADDON_CONFIG in self.map_volumes | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_config(self) -> Path: | ||||||
|  |         """Return add-on config path inside Supervisor.""" | ||||||
|  |         return Path(self.sys_config.path_addon_configs, self.slug) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_extern_config(self) -> PurePath: | ||||||
|  |         """Return add-on config path external for Docker.""" | ||||||
|  |         return PurePath(self.sys_config.path_extern_addon_configs, self.slug) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def path_options(self) -> Path: |     def path_options(self) -> Path: | ||||||
|         """Return path to add-on options.""" |         """Return path to add-on options.""" | ||||||
| @@ -516,7 +542,7 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         # TCP monitoring |         # TCP monitoring | ||||||
|         if s_prefix == "tcp": |         if s_prefix == "tcp": | ||||||
|             return await self.sys_run_in_executor(check_port, self.ip_address, port) |             return await check_port(self.ip_address, port) | ||||||
|  |  | ||||||
|         # lookup the correct protocol from config |         # lookup the correct protocol from config | ||||||
|         if t_proto: |         if t_proto: | ||||||
| @@ -532,7 +558,7 @@ class Addon(AddonModel): | |||||||
|             ) as req: |             ) as req: | ||||||
|                 if req.status < 300: |                 if req.status < 300: | ||||||
|                     return True |                     return True | ||||||
|         except (asyncio.TimeoutError, aiohttp.ClientError): |         except (TimeoutError, aiohttp.ClientError): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         return False |         return False | ||||||
| @@ -559,6 +585,11 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         raise AddonConfigurationError() |         raise AddonConfigurationError() | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_unload", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|     async def unload(self) -> None: |     async def unload(self) -> None: | ||||||
|         """Unload add-on and remove data.""" |         """Unload add-on and remove data.""" | ||||||
|         if self._startup_task: |         if self._startup_task: | ||||||
| @@ -570,12 +601,186 @@ class Addon(AddonModel): | |||||||
|         for listener in self._listeners: |         for listener in self._listeners: | ||||||
|             self.sys_bus.remove_listener(listener) |             self.sys_bus.remove_listener(listener) | ||||||
|  |  | ||||||
|         if not self.path_data.is_dir(): |         if self.path_data.is_dir(): | ||||||
|             return |  | ||||||
|  |  | ||||||
|             _LOGGER.info("Removing add-on data folder %s", self.path_data) |             _LOGGER.info("Removing add-on data folder %s", self.path_data) | ||||||
|             await remove_data(self.path_data) |             await remove_data(self.path_data) | ||||||
|  |  | ||||||
|  |     async def _check_ingress_port(self): | ||||||
|  |         """Assign a ingress port if dynamic port selection is used.""" | ||||||
|  |         if not self.with_ingress: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         if self.data[ATTR_INGRESS_PORT] == 0: | ||||||
|  |             self.data[ATTR_INGRESS_PORT] = await self.sys_ingress.get_dynamic_port( | ||||||
|  |                 self.slug | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_install", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def install(self) -> None: | ||||||
|  |         """Install and setup this addon.""" | ||||||
|  |         self.sys_addons.data.install(self.addon_store) | ||||||
|  |         await self.load() | ||||||
|  |  | ||||||
|  |         if not self.path_data.is_dir(): | ||||||
|  |             _LOGGER.info( | ||||||
|  |                 "Creating Home Assistant add-on data folder %s", self.path_data | ||||||
|  |             ) | ||||||
|  |             self.path_data.mkdir() | ||||||
|  |  | ||||||
|  |         # Setup/Fix AppArmor profile | ||||||
|  |         await self.install_apparmor() | ||||||
|  |  | ||||||
|  |         # Install image | ||||||
|  |         try: | ||||||
|  |             await self.instance.install( | ||||||
|  |                 self.latest_version, self.addon_store.image, arch=self.arch | ||||||
|  |             ) | ||||||
|  |         except DockerError as err: | ||||||
|  |             self.sys_addons.data.uninstall(self) | ||||||
|  |             raise AddonsError() from err | ||||||
|  |  | ||||||
|  |         # Add to addon manager | ||||||
|  |         self.sys_addons.local[self.slug] = self | ||||||
|  |  | ||||||
|  |         # Reload ingress tokens | ||||||
|  |         if self.with_ingress: | ||||||
|  |             await self.sys_ingress.reload() | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_uninstall", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def uninstall(self) -> None: | ||||||
|  |         """Uninstall and cleanup this addon.""" | ||||||
|  |         try: | ||||||
|  |             await self.instance.remove() | ||||||
|  |         except DockerError as err: | ||||||
|  |             raise AddonsError() from err | ||||||
|  |  | ||||||
|  |         self.state = AddonState.UNKNOWN | ||||||
|  |  | ||||||
|  |         await self.unload() | ||||||
|  |  | ||||||
|  |         # Cleanup audio settings | ||||||
|  |         if self.path_pulse.exists(): | ||||||
|  |             with suppress(OSError): | ||||||
|  |                 self.path_pulse.unlink() | ||||||
|  |  | ||||||
|  |         # Cleanup AppArmor profile | ||||||
|  |         with suppress(HostAppArmorError): | ||||||
|  |             await self.uninstall_apparmor() | ||||||
|  |  | ||||||
|  |         # Cleanup Ingress panel from sidebar | ||||||
|  |         if self.ingress_panel: | ||||||
|  |             self.ingress_panel = False | ||||||
|  |             with suppress(HomeAssistantAPIError): | ||||||
|  |                 await self.sys_ingress.update_hass_panel(self) | ||||||
|  |  | ||||||
|  |         # Cleanup Ingress dynamic port assignment | ||||||
|  |         if self.with_ingress: | ||||||
|  |             self.sys_create_task(self.sys_ingress.reload()) | ||||||
|  |             self.sys_ingress.del_dynamic_port(self.slug) | ||||||
|  |  | ||||||
|  |         # Cleanup discovery data | ||||||
|  |         for message in self.sys_discovery.list_messages: | ||||||
|  |             if message.addon != self.slug: | ||||||
|  |                 continue | ||||||
|  |             self.sys_discovery.remove(message) | ||||||
|  |  | ||||||
|  |         # Cleanup services data | ||||||
|  |         for service in self.sys_services.list_services: | ||||||
|  |             if self.slug not in service.active: | ||||||
|  |                 continue | ||||||
|  |             service.del_service_data(self) | ||||||
|  |  | ||||||
|  |         # Remove from addon manager | ||||||
|  |         self.sys_addons.data.uninstall(self) | ||||||
|  |         self.sys_addons.local.pop(self.slug) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_update", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def update(self) -> asyncio.Task | None: | ||||||
|  |         """Update this addon to latest version. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see start) | ||||||
|  |         if it was running. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         old_image = self.image | ||||||
|  |         # Cache data to prevent races with other updates to global | ||||||
|  |         store = self.addon_store.clone() | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             await self.instance.update(store.version, store.image, arch=self.arch) | ||||||
|  |         except DockerError as err: | ||||||
|  |             raise AddonsError() from err | ||||||
|  |  | ||||||
|  |         # Stop the addon if running | ||||||
|  |         if (last_state := self.state) in {AddonState.STARTED, AddonState.STARTUP}: | ||||||
|  |             await self.stop() | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             _LOGGER.info("Add-on '%s' successfully updated", self.slug) | ||||||
|  |             self.sys_addons.data.update(store) | ||||||
|  |             await self._check_ingress_port() | ||||||
|  |  | ||||||
|  |             # Cleanup | ||||||
|  |             with suppress(DockerError): | ||||||
|  |                 await self.instance.cleanup( | ||||||
|  |                     old_image=old_image, image=store.image, version=store.version | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |             # Setup/Fix AppArmor profile | ||||||
|  |             await self.install_apparmor() | ||||||
|  |  | ||||||
|  |         finally: | ||||||
|  |             # restore state. Return Task for caller if no exception | ||||||
|  |             out = ( | ||||||
|  |                 await self.start() | ||||||
|  |                 if last_state in {AddonState.STARTED, AddonState.STARTUP} | ||||||
|  |                 else None | ||||||
|  |             ) | ||||||
|  |         return out | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_rebuild", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def rebuild(self) -> asyncio.Task | None: | ||||||
|  |         """Rebuild this addons container and image. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see start) | ||||||
|  |         if it was running. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         last_state: AddonState = self.state | ||||||
|  |         try: | ||||||
|  |             # remove docker container but not addon config | ||||||
|  |             try: | ||||||
|  |                 await self.instance.remove() | ||||||
|  |                 await self.instance.install(self.version) | ||||||
|  |             except DockerError as err: | ||||||
|  |                 raise AddonsError() from err | ||||||
|  |  | ||||||
|  |             self.sys_addons.data.update(self.addon_store) | ||||||
|  |             _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug) | ||||||
|  |  | ||||||
|  |         finally: | ||||||
|  |             # restore state | ||||||
|  |             out = ( | ||||||
|  |                 await self.start() | ||||||
|  |                 if last_state in [AddonState.STARTED, AddonState.STARTUP] | ||||||
|  |                 else None | ||||||
|  |             ) | ||||||
|  |         return out | ||||||
|  |  | ||||||
|     def write_pulse(self) -> None: |     def write_pulse(self) -> None: | ||||||
|         """Write asound config to file and return True on success.""" |         """Write asound config to file and return True on success.""" | ||||||
|         pulse_config = self.sys_plugins.audio.pulse_client( |         pulse_config = self.sys_plugins.audio.pulse_client( | ||||||
| @@ -590,6 +795,8 @@ class Addon(AddonModel): | |||||||
|         try: |         try: | ||||||
|             self.path_pulse.write_text(pulse_config, encoding="utf-8") |             self.path_pulse.write_text(pulse_config, encoding="utf-8") | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error( |             _LOGGER.error( | ||||||
|                 "Add-on %s can't write pulse/client.config: %s", self.slug, err |                 "Add-on %s can't write pulse/client.config: %s", self.slug, err | ||||||
|             ) |             ) | ||||||
| @@ -660,9 +867,9 @@ class Addon(AddonModel): | |||||||
|         try: |         try: | ||||||
|             self._startup_task = self.sys_create_task(self._startup_event.wait()) |             self._startup_task = self.sys_create_task(self._startup_event.wait()) | ||||||
|             await asyncio.wait_for(self._startup_task, STARTUP_TIMEOUT) |             await asyncio.wait_for(self._startup_task, STARTUP_TIMEOUT) | ||||||
|         except asyncio.TimeoutError: |         except TimeoutError: | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "Timeout while waiting for addon %s to start, took more then %s seconds", |                 "Timeout while waiting for addon %s to start, took more than %s seconds", | ||||||
|                 self.name, |                 self.name, | ||||||
|                 STARTUP_TIMEOUT, |                 STARTUP_TIMEOUT, | ||||||
|             ) |             ) | ||||||
| @@ -671,16 +878,21 @@ class Addon(AddonModel): | |||||||
|         finally: |         finally: | ||||||
|             self._startup_task = None |             self._startup_task = None | ||||||
|  |  | ||||||
|     async def start(self) -> Awaitable[None]: |     @Job( | ||||||
|  |         name="addon_start", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def start(self) -> asyncio.Task: | ||||||
|         """Set options and start add-on. |         """Set options and start add-on. | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started'. |         Returns a Task that completes when addon has state 'started'. | ||||||
|         For addons with a healthcheck, that is when they become healthy or unhealthy. |         For addons with a healthcheck, that is when they become healthy or unhealthy. | ||||||
|         Addons without a healthcheck have state 'started' immediately. |         Addons without a healthcheck have state 'started' immediately. | ||||||
|         """ |         """ | ||||||
|         if await self.instance.is_running(): |         if await self.instance.is_running(): | ||||||
|             _LOGGER.warning("%s is already running!", self.slug) |             _LOGGER.warning("%s is already running!", self.slug) | ||||||
|             return self._wait_for_startup() |             return self.sys_create_task(self._wait_for_startup()) | ||||||
|  |  | ||||||
|         # Access Token |         # Access Token | ||||||
|         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56) |         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56) | ||||||
| @@ -693,6 +905,18 @@ class Addon(AddonModel): | |||||||
|         if self.with_audio: |         if self.with_audio: | ||||||
|             self.write_pulse() |             self.write_pulse() | ||||||
|  |  | ||||||
|  |         def _check_addon_config_dir(): | ||||||
|  |             if self.path_config.is_dir(): | ||||||
|  |                 return | ||||||
|  |  | ||||||
|  |             _LOGGER.info( | ||||||
|  |                 "Creating Home Assistant add-on config folder %s", self.path_config | ||||||
|  |             ) | ||||||
|  |             self.path_config.mkdir() | ||||||
|  |  | ||||||
|  |         if self.addon_config_used: | ||||||
|  |             await self.sys_run_in_executor(_check_addon_config_dir) | ||||||
|  |  | ||||||
|         # Start Add-on |         # Start Add-on | ||||||
|         self._startup_event.clear() |         self._startup_event.clear() | ||||||
|         try: |         try: | ||||||
| @@ -701,8 +925,13 @@ class Addon(AddonModel): | |||||||
|             self.state = AddonState.ERROR |             self.state = AddonState.ERROR | ||||||
|             raise AddonsError() from err |             raise AddonsError() from err | ||||||
|  |  | ||||||
|         return self._wait_for_startup() |         return self.sys_create_task(self._wait_for_startup()) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_stop", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|     async def stop(self) -> None: |     async def stop(self) -> None: | ||||||
|         """Stop add-on.""" |         """Stop add-on.""" | ||||||
|         self._manual_stop = True |         self._manual_stop = True | ||||||
| @@ -712,10 +941,15 @@ class Addon(AddonModel): | |||||||
|             self.state = AddonState.ERROR |             self.state = AddonState.ERROR | ||||||
|             raise AddonsError() from err |             raise AddonsError() from err | ||||||
|  |  | ||||||
|     async def restart(self) -> Awaitable[None]: |     @Job( | ||||||
|  |         name="addon_restart", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def restart(self) -> asyncio.Task: | ||||||
|         """Restart add-on. |         """Restart add-on. | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see start). |         Returns a Task that completes when addon has state 'started' (see start). | ||||||
|         """ |         """ | ||||||
|         with suppress(AddonsError): |         with suppress(AddonsError): | ||||||
|             await self.stop() |             await self.stop() | ||||||
| @@ -742,6 +976,11 @@ class Addon(AddonModel): | |||||||
|         except DockerError as err: |         except DockerError as err: | ||||||
|             raise AddonsError() from err |             raise AddonsError() from err | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_write_stdin", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|     async def write_stdin(self, data) -> None: |     async def write_stdin(self, data) -> None: | ||||||
|         """Write data to add-on stdin.""" |         """Write data to add-on stdin.""" | ||||||
|         if not self.with_stdin: |         if not self.with_stdin: | ||||||
| @@ -771,7 +1010,11 @@ class Addon(AddonModel): | |||||||
|                 _LOGGER.error, |                 _LOGGER.error, | ||||||
|             ) from err |             ) from err | ||||||
|  |  | ||||||
|     @Job(name="addon_begin_backup") |     @Job( | ||||||
|  |         name="addon_begin_backup", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|     async def begin_backup(self) -> bool: |     async def begin_backup(self) -> bool: | ||||||
|         """Execute pre commands or stop addon if necessary. |         """Execute pre commands or stop addon if necessary. | ||||||
|  |  | ||||||
| @@ -782,21 +1025,22 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|         if self.backup_mode == AddonBackupMode.COLD: |         if self.backup_mode == AddonBackupMode.COLD: | ||||||
|             _LOGGER.info("Shutdown add-on %s for cold backup", self.slug) |             _LOGGER.info("Shutdown add-on %s for cold backup", self.slug) | ||||||
|             try: |             await self.stop() | ||||||
|                 await self.instance.stop() |  | ||||||
|             except DockerError as err: |  | ||||||
|                 raise AddonsError() from err |  | ||||||
|  |  | ||||||
|         elif self.backup_pre is not None: |         elif self.backup_pre is not None: | ||||||
|             await self._backup_command(self.backup_pre) |             await self._backup_command(self.backup_pre) | ||||||
|  |  | ||||||
|         return True |         return True | ||||||
|  |  | ||||||
|     @Job(name="addon_end_backup") |     @Job( | ||||||
|     async def end_backup(self) -> Awaitable[None] | None: |         name="addon_end_backup", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def end_backup(self) -> asyncio.Task | None: | ||||||
|         """Execute post commands or restart addon if necessary. |         """Execute post commands or restart addon if necessary. | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see start) |         Returns a Task that completes when addon has state 'started' (see start) | ||||||
|         for cold backup. Else nothing is returned. |         for cold backup. Else nothing is returned. | ||||||
|         """ |         """ | ||||||
|         if self.backup_mode is AddonBackupMode.COLD: |         if self.backup_mode is AddonBackupMode.COLD: | ||||||
| @@ -807,11 +1051,15 @@ class Addon(AddonModel): | |||||||
|             await self._backup_command(self.backup_post) |             await self._backup_command(self.backup_post) | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     @Job(name="addon_backup") |     @Job( | ||||||
|     async def backup(self, tar_file: tarfile.TarFile) -> Awaitable[None] | None: |         name="addon_backup", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def backup(self, tar_file: tarfile.TarFile) -> asyncio.Task | None: | ||||||
|         """Backup state of an add-on. |         """Backup state of an add-on. | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see start) |         Returns a Task that completes when addon has state 'started' (see start) | ||||||
|         for cold backup. Else nothing is returned. |         for cold backup. Else nothing is returned. | ||||||
|         """ |         """ | ||||||
|         wait_for_start: Awaitable[None] | None = None |         wait_for_start: Awaitable[None] | None = None | ||||||
| @@ -866,6 +1114,15 @@ class Addon(AddonModel): | |||||||
|                         arcname="data", |                         arcname="data", | ||||||
|                     ) |                     ) | ||||||
|  |  | ||||||
|  |                     # Backup config | ||||||
|  |                     if self.addon_config_used: | ||||||
|  |                         atomic_contents_add( | ||||||
|  |                             backup, | ||||||
|  |                             self.path_config, | ||||||
|  |                             excludes=self.backup_exclude, | ||||||
|  |                             arcname="config", | ||||||
|  |                         ) | ||||||
|  |  | ||||||
|             is_running = await self.begin_backup() |             is_running = await self.begin_backup() | ||||||
|             try: |             try: | ||||||
|                 _LOGGER.info("Building backup for add-on %s", self.slug) |                 _LOGGER.info("Building backup for add-on %s", self.slug) | ||||||
| @@ -881,10 +1138,15 @@ class Addon(AddonModel): | |||||||
|         _LOGGER.info("Finish backup for addon %s", self.slug) |         _LOGGER.info("Finish backup for addon %s", self.slug) | ||||||
|         return wait_for_start |         return wait_for_start | ||||||
|  |  | ||||||
|     async def restore(self, tar_file: tarfile.TarFile) -> Awaitable[None] | None: |     @Job( | ||||||
|  |         name="addon_restore", | ||||||
|  |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def restore(self, tar_file: tarfile.TarFile) -> asyncio.Task | None: | ||||||
|         """Restore state of an add-on. |         """Restore state of an add-on. | ||||||
|  |  | ||||||
|         Returns a coroutine that completes when addon has state 'started' (see start) |         Returns a Task that completes when addon has state 'started' (see start) | ||||||
|         if addon is started after restore. Else nothing is returned. |         if addon is started after restore. Else nothing is returned. | ||||||
|         """ |         """ | ||||||
|         wait_for_start: Awaitable[None] | None = None |         wait_for_start: Awaitable[None] | None = None | ||||||
| @@ -893,7 +1155,11 @@ class Addon(AddonModel): | |||||||
|             def _extract_tarfile(): |             def _extract_tarfile(): | ||||||
|                 """Extract tar backup.""" |                 """Extract tar backup.""" | ||||||
|                 with tar_file as backup: |                 with tar_file as backup: | ||||||
|                     backup.extractall(path=Path(temp), members=secure_path(backup)) |                     backup.extractall( | ||||||
|  |                         path=Path(temp), | ||||||
|  |                         members=secure_path(backup), | ||||||
|  |                         filter="fully_trusted", | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 await self.sys_run_in_executor(_extract_tarfile) |                 await self.sys_run_in_executor(_extract_tarfile) | ||||||
| @@ -933,9 +1199,9 @@ class Addon(AddonModel): | |||||||
|  |  | ||||||
|             # Stop it first if its running |             # Stop it first if its running | ||||||
|             if await self.instance.is_running(): |             if await self.instance.is_running(): | ||||||
|                 with suppress(DockerError): |                 await self.stop() | ||||||
|                     await self.instance.stop() |  | ||||||
|  |  | ||||||
|  |             try: | ||||||
|                 # Check version / restore image |                 # Check version / restore image | ||||||
|                 version = data[ATTR_VERSION] |                 version = data[ATTR_VERSION] | ||||||
|                 if not await self.instance.exists(): |                 if not await self.instance.exists(): | ||||||
| @@ -947,25 +1213,37 @@ class Addon(AddonModel): | |||||||
|                             await self.instance.import_image(image_file) |                             await self.instance.import_image(image_file) | ||||||
|                     else: |                     else: | ||||||
|                         with suppress(DockerError): |                         with suppress(DockerError): | ||||||
|                         await self.instance.install(version, restore_image) |                             await self.instance.install( | ||||||
|  |                                 version, restore_image, self.arch | ||||||
|  |                             ) | ||||||
|                             await self.instance.cleanup() |                             await self.instance.cleanup() | ||||||
|                 elif self.instance.version != version or self.legacy: |                 elif self.instance.version != version or self.legacy: | ||||||
|                     _LOGGER.info("Restore/Update of image for addon %s", self.slug) |                     _LOGGER.info("Restore/Update of image for addon %s", self.slug) | ||||||
|                     with suppress(DockerError): |                     with suppress(DockerError): | ||||||
|                     await self.instance.update(version, restore_image) |                         await self.instance.update(version, restore_image, self.arch) | ||||||
|  |                 self._check_ingress_port() | ||||||
|  |  | ||||||
|             # Restore data |                 # Restore data and config | ||||||
|                 def _restore_data(): |                 def _restore_data(): | ||||||
|                 """Restore data.""" |                     """Restore data and config.""" | ||||||
|                     temp_data = Path(temp, "data") |                     temp_data = Path(temp, "data") | ||||||
|                     if temp_data.is_dir(): |                     if temp_data.is_dir(): | ||||||
|                         shutil.copytree(temp_data, self.path_data, symlinks=True) |                         shutil.copytree(temp_data, self.path_data, symlinks=True) | ||||||
|                     else: |                     else: | ||||||
|                         self.path_data.mkdir() |                         self.path_data.mkdir() | ||||||
|  |  | ||||||
|             _LOGGER.info("Restoring data for addon %s", self.slug) |                     temp_config = Path(temp, "config") | ||||||
|  |                     if temp_config.is_dir(): | ||||||
|  |                         shutil.copytree(temp_config, self.path_config, symlinks=True) | ||||||
|  |                     elif self.addon_config_used: | ||||||
|  |                         self.path_config.mkdir() | ||||||
|  |  | ||||||
|  |                 _LOGGER.info("Restoring data and config for addon %s", self.slug) | ||||||
|                 if self.path_data.is_dir(): |                 if self.path_data.is_dir(): | ||||||
|                     await remove_data(self.path_data) |                     await remove_data(self.path_data) | ||||||
|  |                 if self.path_config.is_dir(): | ||||||
|  |                     await remove_data(self.path_config) | ||||||
|  |  | ||||||
|                 try: |                 try: | ||||||
|                     await self.sys_run_in_executor(_restore_data) |                     await self.sys_run_in_executor(_restore_data) | ||||||
|                 except shutil.Error as err: |                 except shutil.Error as err: | ||||||
| @@ -977,7 +1255,9 @@ class Addon(AddonModel): | |||||||
|                 profile_file = Path(temp, "apparmor.txt") |                 profile_file = Path(temp, "apparmor.txt") | ||||||
|                 if profile_file.exists(): |                 if profile_file.exists(): | ||||||
|                     try: |                     try: | ||||||
|                     await self.sys_host.apparmor.load_profile(self.slug, profile_file) |                         await self.sys_host.apparmor.load_profile( | ||||||
|  |                             self.slug, profile_file | ||||||
|  |                         ) | ||||||
|                     except HostAppArmorError as err: |                     except HostAppArmorError as err: | ||||||
|                         _LOGGER.error( |                         _LOGGER.error( | ||||||
|                             "Can't restore AppArmor profile for add-on %s", self.slug |                             "Can't restore AppArmor profile for add-on %s", self.slug | ||||||
| @@ -988,6 +1268,7 @@ class Addon(AddonModel): | |||||||
|                 if not self.loaded: |                 if not self.loaded: | ||||||
|                     await self.load() |                     await self.load() | ||||||
|  |  | ||||||
|  |             finally: | ||||||
|                 # Run add-on |                 # Run add-on | ||||||
|                 if data[ATTR_STATE] == AddonState.STARTED: |                 if data[ATTR_STATE] == AddonState.STARTED: | ||||||
|                     wait_for_start = await self.start() |                     wait_for_start = await self.start() | ||||||
|   | |||||||
							
								
								
									
										11
									
								
								supervisor/addons/configuration.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								supervisor/addons/configuration.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | |||||||
|  | """Confgiuration Objects for Addon Config.""" | ||||||
|  |  | ||||||
|  | from dataclasses import dataclass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @dataclass(slots=True) | ||||||
|  | class FolderMapping: | ||||||
|  |     """Represent folder mapping configuration.""" | ||||||
|  |  | ||||||
|  |     path: str | None | ||||||
|  |     read_only: bool | ||||||
| @@ -12,8 +12,25 @@ class AddonBackupMode(StrEnum): | |||||||
|     COLD = "cold" |     COLD = "cold" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class MappingType(StrEnum): | ||||||
|  |     """Mapping type of an Add-on Folder.""" | ||||||
|  |  | ||||||
|  |     DATA = "data" | ||||||
|  |     CONFIG = "config" | ||||||
|  |     SSL = "ssl" | ||||||
|  |     ADDONS = "addons" | ||||||
|  |     BACKUP = "backup" | ||||||
|  |     SHARE = "share" | ||||||
|  |     MEDIA = "media" | ||||||
|  |     HOMEASSISTANT_CONFIG = "homeassistant_config" | ||||||
|  |     ALL_ADDON_CONFIGS = "all_addon_configs" | ||||||
|  |     ADDON_CONFIG = "addon_config" | ||||||
|  |  | ||||||
|  |  | ||||||
| ATTR_BACKUP = "backup" | ATTR_BACKUP = "backup" | ||||||
| ATTR_CODENOTARY = "codenotary" | ATTR_CODENOTARY = "codenotary" | ||||||
|  | ATTR_READ_ONLY = "read_only" | ||||||
|  | ATTR_PATH = "path" | ||||||
| WATCHDOG_RETRY_SECONDS = 10 | WATCHDOG_RETRY_SECONDS = 10 | ||||||
| WATCHDOG_MAX_ATTEMPTS = 5 | WATCHDOG_MAX_ATTEMPTS = 5 | ||||||
| WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30) | WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30) | ||||||
|   | |||||||
							
								
								
									
										374
									
								
								supervisor/addons/manager.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										374
									
								
								supervisor/addons/manager.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,374 @@ | |||||||
|  | """Supervisor add-on manager.""" | ||||||
|  | import asyncio | ||||||
|  | from collections.abc import Awaitable | ||||||
|  | from contextlib import suppress | ||||||
|  | import logging | ||||||
|  | import tarfile | ||||||
|  | from typing import Union | ||||||
|  |  | ||||||
|  | from ..const import AddonBoot, AddonStartup, AddonState | ||||||
|  | from ..coresys import CoreSys, CoreSysAttributes | ||||||
|  | from ..exceptions import ( | ||||||
|  |     AddonConfigurationError, | ||||||
|  |     AddonsError, | ||||||
|  |     AddonsJobError, | ||||||
|  |     AddonsNotSupportedError, | ||||||
|  |     CoreDNSError, | ||||||
|  |     DockerAPIError, | ||||||
|  |     DockerError, | ||||||
|  |     DockerNotFound, | ||||||
|  |     HassioError, | ||||||
|  |     HomeAssistantAPIError, | ||||||
|  | ) | ||||||
|  | from ..jobs.decorator import Job, JobCondition | ||||||
|  | from ..resolution.const import ContextType, IssueType, SuggestionType | ||||||
|  | from ..store.addon import AddonStore | ||||||
|  | from ..utils import check_exception_chain | ||||||
|  | from ..utils.sentry import capture_exception | ||||||
|  | from .addon import Addon | ||||||
|  | from .const import ADDON_UPDATE_CONDITIONS | ||||||
|  | from .data import AddonsData | ||||||
|  |  | ||||||
|  | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | AnyAddon = Union[Addon, AddonStore] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AddonManager(CoreSysAttributes): | ||||||
|  |     """Manage add-ons inside Supervisor.""" | ||||||
|  |  | ||||||
|  |     def __init__(self, coresys: CoreSys): | ||||||
|  |         """Initialize Docker base wrapper.""" | ||||||
|  |         self.coresys: CoreSys = coresys | ||||||
|  |         self.data: AddonsData = AddonsData(coresys) | ||||||
|  |         self.local: dict[str, Addon] = {} | ||||||
|  |         self.store: dict[str, AddonStore] = {} | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def all(self) -> list[AnyAddon]: | ||||||
|  |         """Return a list of all add-ons.""" | ||||||
|  |         addons: dict[str, AnyAddon] = {**self.store, **self.local} | ||||||
|  |         return list(addons.values()) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def installed(self) -> list[Addon]: | ||||||
|  |         """Return a list of all installed add-ons.""" | ||||||
|  |         return list(self.local.values()) | ||||||
|  |  | ||||||
|  |     def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: | ||||||
|  |         """Return an add-on from slug. | ||||||
|  |  | ||||||
|  |         Prio: | ||||||
|  |           1 - Local | ||||||
|  |           2 - Store | ||||||
|  |         """ | ||||||
|  |         if addon_slug in self.local: | ||||||
|  |             return self.local[addon_slug] | ||||||
|  |         if not local_only: | ||||||
|  |             return self.store.get(addon_slug) | ||||||
|  |         return None | ||||||
|  |  | ||||||
|  |     def from_token(self, token: str) -> Addon | None: | ||||||
|  |         """Return an add-on from Supervisor token.""" | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if token == addon.supervisor_token: | ||||||
|  |                 return addon | ||||||
|  |         return None | ||||||
|  |  | ||||||
|  |     async def load(self) -> None: | ||||||
|  |         """Start up add-on management.""" | ||||||
|  |         tasks = [] | ||||||
|  |         for slug in self.data.system: | ||||||
|  |             addon = self.local[slug] = Addon(self.coresys, slug) | ||||||
|  |             tasks.append(self.sys_create_task(addon.load())) | ||||||
|  |  | ||||||
|  |         # Run initial tasks | ||||||
|  |         _LOGGER.info("Found %d installed add-ons", len(tasks)) | ||||||
|  |         if tasks: | ||||||
|  |             await asyncio.wait(tasks) | ||||||
|  |  | ||||||
|  |         # Sync DNS | ||||||
|  |         await self.sync_dns() | ||||||
|  |  | ||||||
|  |     async def boot(self, stage: AddonStartup) -> None: | ||||||
|  |         """Boot add-ons with mode auto.""" | ||||||
|  |         tasks: list[Addon] = [] | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if addon.boot != AddonBoot.AUTO or addon.startup != stage: | ||||||
|  |                 continue | ||||||
|  |             tasks.append(addon) | ||||||
|  |  | ||||||
|  |         # Evaluate add-ons which need to be started | ||||||
|  |         _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks)) | ||||||
|  |         if not tasks: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Start Add-ons sequential | ||||||
|  |         # avoid issue on slow IO | ||||||
|  |         # Config.wait_boot is deprecated. Until addons update with healthchecks, | ||||||
|  |         # add a sleep task for it to keep the same minimum amount of wait time | ||||||
|  |         wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] | ||||||
|  |         for addon in tasks: | ||||||
|  |             try: | ||||||
|  |                 if start_task := await addon.start(): | ||||||
|  |                     wait_boot.append(start_task) | ||||||
|  |             except AddonsError as err: | ||||||
 |  |                 # Check if there is a system/user issue | ||||||
|  |                 if check_exception_chain( | ||||||
|  |                     err, (DockerAPIError, DockerNotFound, AddonConfigurationError) | ||||||
|  |                 ): | ||||||
|  |                     addon.boot = AddonBoot.MANUAL | ||||||
|  |                     addon.save_persist() | ||||||
|  |             except HassioError: | ||||||
|  |                 pass  # These are already handled | ||||||
|  |             else: | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             _LOGGER.warning("Can't start Add-on %s", addon.slug) | ||||||
|  |  | ||||||
|  |         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere | ||||||
|  |         await asyncio.gather(*wait_boot, return_exceptions=True) | ||||||
|  |  | ||||||
|  |     async def shutdown(self, stage: AddonStartup) -> None: | ||||||
|  |         """Shutdown addons.""" | ||||||
|  |         tasks: list[Addon] = [] | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if addon.state != AddonState.STARTED or addon.startup != stage: | ||||||
|  |                 continue | ||||||
|  |             tasks.append(addon) | ||||||
|  |  | ||||||
|  |         # Evaluate add-ons which need to be stopped | ||||||
|  |         _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks)) | ||||||
|  |         if not tasks: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Stop Add-ons sequential | ||||||
|  |         # avoid issue on slow IO | ||||||
|  |         for addon in tasks: | ||||||
|  |             try: | ||||||
|  |                 await addon.stop() | ||||||
|  |             except Exception as err:  # pylint: disable=broad-except | ||||||
|  |                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) | ||||||
|  |                 capture_exception(err) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_install", | ||||||
|  |         conditions=ADDON_UPDATE_CONDITIONS, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def install(self, slug: str) -> None: | ||||||
|  |         """Install an add-on.""" | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug in self.local: | ||||||
|  |             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning) | ||||||
|  |         store = self.store.get(slug) | ||||||
|  |  | ||||||
|  |         if not store: | ||||||
|  |             raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error) | ||||||
|  |  | ||||||
|  |         store.validate_availability() | ||||||
|  |  | ||||||
|  |         await Addon(self.coresys, slug).install() | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Add-on '%s' successfully installed", slug) | ||||||
|  |  | ||||||
|  |     async def uninstall(self, slug: str) -> None: | ||||||
|  |         """Remove an add-on.""" | ||||||
|  |         if slug not in self.local: | ||||||
|  |             _LOGGER.warning("Add-on %s is not installed", slug) | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         await self.local[slug].uninstall() | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Add-on '%s' successfully removed", slug) | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_update", | ||||||
|  |         conditions=ADDON_UPDATE_CONDITIONS, | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def update( | ||||||
|  |         self, slug: str, backup: bool | None = False | ||||||
|  |     ) -> asyncio.Task | None: | ||||||
|  |         """Update add-on. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||||
|  |         if addon is started after update. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug not in self.local: | ||||||
|  |             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||||
|  |         addon = self.local[slug] | ||||||
|  |  | ||||||
|  |         if addon.is_detached: | ||||||
|  |             raise AddonsError( | ||||||
|  |                 f"Add-on {slug} is not available inside store", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |         store = self.store[slug] | ||||||
|  |  | ||||||
|  |         if addon.version == store.version: | ||||||
|  |             raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning) | ||||||
|  |  | ||||||
 |  |         # Check if available, maybe something has changed | ||||||
|  |         store.validate_availability() | ||||||
|  |  | ||||||
|  |         if backup: | ||||||
|  |             await self.sys_backups.do_backup_partial( | ||||||
|  |                 name=f"addon_{addon.slug}_{addon.version}", | ||||||
|  |                 homeassistant=False, | ||||||
|  |                 addons=[addon.slug], | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         return await addon.update() | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_rebuild", | ||||||
|  |         conditions=[ | ||||||
|  |             JobCondition.FREE_SPACE, | ||||||
|  |             JobCondition.INTERNET_HOST, | ||||||
|  |             JobCondition.HEALTHY, | ||||||
|  |         ], | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def rebuild(self, slug: str) -> asyncio.Task | None: | ||||||
|  |         """Perform a rebuild of local build add-on. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||||
|  |         if addon is started after rebuild. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug not in self.local: | ||||||
|  |             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error) | ||||||
|  |         addon = self.local[slug] | ||||||
|  |  | ||||||
|  |         if addon.is_detached: | ||||||
|  |             raise AddonsError( | ||||||
|  |                 f"Add-on {slug} is not available inside store", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |         store = self.store[slug] | ||||||
|  |  | ||||||
|  |         # Check if a rebuild is possible now | ||||||
|  |         if addon.version != store.version: | ||||||
|  |             raise AddonsError( | ||||||
|  |                 "Version changed, use Update instead Rebuild", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |         if not addon.need_build: | ||||||
|  |             raise AddonsNotSupportedError( | ||||||
|  |                 "Can't rebuild a image based add-on", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         return await addon.rebuild() | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_restore", | ||||||
|  |         conditions=[ | ||||||
|  |             JobCondition.FREE_SPACE, | ||||||
|  |             JobCondition.INTERNET_HOST, | ||||||
|  |             JobCondition.HEALTHY, | ||||||
|  |         ], | ||||||
|  |         on_condition=AddonsJobError, | ||||||
|  |     ) | ||||||
|  |     async def restore( | ||||||
|  |         self, slug: str, tar_file: tarfile.TarFile | ||||||
|  |     ) -> asyncio.Task | None: | ||||||
|  |         """Restore state of an add-on. | ||||||
|  |  | ||||||
|  |         Returns a Task that completes when addon has state 'started' (see addon.start) | ||||||
|  |         if addon is started after restore. Else nothing is returned. | ||||||
|  |         """ | ||||||
|  |         self.sys_jobs.current.reference = slug | ||||||
|  |  | ||||||
|  |         if slug not in self.local: | ||||||
|  |             _LOGGER.debug("Add-on %s is not local available for restore", slug) | ||||||
|  |             addon = Addon(self.coresys, slug) | ||||||
|  |             had_ingress = False | ||||||
|  |         else: | ||||||
|  |             _LOGGER.debug("Add-on %s is local available for restore", slug) | ||||||
|  |             addon = self.local[slug] | ||||||
|  |             had_ingress = addon.ingress_panel | ||||||
|  |  | ||||||
|  |         wait_for_start = await addon.restore(tar_file) | ||||||
|  |  | ||||||
|  |         # Check if new | ||||||
|  |         if slug not in self.local: | ||||||
|  |             _LOGGER.info("Detect new Add-on after restore %s", slug) | ||||||
|  |             self.local[slug] = addon | ||||||
|  |  | ||||||
|  |         # Update ingress | ||||||
|  |         if had_ingress != addon.ingress_panel: | ||||||
|  |             await self.sys_ingress.reload() | ||||||
|  |             with suppress(HomeAssistantAPIError): | ||||||
|  |                 await self.sys_ingress.update_hass_panel(addon) | ||||||
|  |  | ||||||
|  |         return wait_for_start | ||||||
|  |  | ||||||
|  |     @Job( | ||||||
|  |         name="addon_manager_repair", | ||||||
|  |         conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST], | ||||||
|  |     ) | ||||||
|  |     async def repair(self) -> None: | ||||||
|  |         """Repair local add-ons.""" | ||||||
|  |         needs_repair: list[Addon] = [] | ||||||
|  |  | ||||||
|  |         # Evaluate Add-ons to repair | ||||||
|  |         for addon in self.installed: | ||||||
|  |             if await addon.instance.exists(): | ||||||
|  |                 continue | ||||||
|  |             needs_repair.append(addon) | ||||||
|  |  | ||||||
|  |         _LOGGER.info("Found %d add-ons to repair", len(needs_repair)) | ||||||
|  |         if not needs_repair: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         for addon in needs_repair: | ||||||
|  |             _LOGGER.info("Repairing for add-on: %s", addon.slug) | ||||||
|  |             with suppress(DockerError, KeyError): | ||||||
 |  |                 # Need to pull the image again | ||||||
|  |                 if not addon.need_build: | ||||||
|  |                     await addon.instance.install(addon.version, addon.image) | ||||||
|  |                     continue | ||||||
|  |  | ||||||
|  |                 # Need local lookup | ||||||
|  |                 if addon.need_build and not addon.is_detached: | ||||||
|  |                     store = self.store[addon.slug] | ||||||
|  |                     # If this add-on is available for rebuild | ||||||
|  |                     if addon.version == store.version: | ||||||
|  |                         await addon.instance.install(addon.version, addon.image) | ||||||
|  |                         continue | ||||||
|  |  | ||||||
|  |             _LOGGER.error("Can't repair %s", addon.slug) | ||||||
|  |             with suppress(AddonsError): | ||||||
|  |                 await self.uninstall(addon.slug) | ||||||
|  |  | ||||||
|  |     async def sync_dns(self) -> None: | ||||||
|  |         """Sync add-ons DNS names.""" | ||||||
|  |         # Update hosts | ||||||
|  |         add_host_coros: list[Awaitable[None]] = [] | ||||||
|  |         for addon in self.installed: | ||||||
|  |             try: | ||||||
|  |                 if not await addon.instance.is_running(): | ||||||
|  |                     continue | ||||||
|  |             except DockerError as err: | ||||||
|  |                 _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err) | ||||||
|  |                 self.sys_resolution.create_issue( | ||||||
|  |                     IssueType.CORRUPT_DOCKER, | ||||||
|  |                     ContextType.ADDON, | ||||||
|  |                     reference=addon.slug, | ||||||
|  |                     suggestions=[SuggestionType.EXECUTE_REPAIR], | ||||||
|  |                 ) | ||||||
|  |                 capture_exception(err) | ||||||
|  |             else: | ||||||
|  |                 add_host_coros.append( | ||||||
|  |                     self.sys_plugins.dns.add_host( | ||||||
|  |                         ipv4=addon.ip_address, names=[addon.hostname], write=False | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |         await asyncio.gather(*add_host_coros) | ||||||
|  |  | ||||||
|  |         # Write hosts files | ||||||
|  |         with suppress(CoreDNSError): | ||||||
|  |             await self.sys_plugins.dns.write_hosts() | ||||||
| @@ -1,7 +1,7 @@ | |||||||
| """Init file for Supervisor add-ons.""" | """Init file for Supervisor add-ons.""" | ||||||
| from abc import ABC, abstractmethod | from abc import ABC, abstractmethod | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
| from collections.abc import Awaitable, Callable | from collections.abc import Callable | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| @@ -65,6 +65,7 @@ from ..const import ( | |||||||
|     ATTR_TIMEOUT, |     ATTR_TIMEOUT, | ||||||
|     ATTR_TMPFS, |     ATTR_TMPFS, | ||||||
|     ATTR_TRANSLATIONS, |     ATTR_TRANSLATIONS, | ||||||
|  |     ATTR_TYPE, | ||||||
|     ATTR_UART, |     ATTR_UART, | ||||||
|     ATTR_UDEV, |     ATTR_UDEV, | ||||||
|     ATTR_URL, |     ATTR_URL, | ||||||
| @@ -85,9 +86,18 @@ from ..docker.const import Capabilities | |||||||
| from ..exceptions import AddonsNotSupportedError | from ..exceptions import AddonsNotSupportedError | ||||||
| from ..jobs.const import JOB_GROUP_ADDON | from ..jobs.const import JOB_GROUP_ADDON | ||||||
| from ..jobs.job_group import JobGroup | from ..jobs.job_group import JobGroup | ||||||
| from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode | from ..utils import version_is_new_enough | ||||||
|  | from .configuration import FolderMapping | ||||||
|  | from .const import ( | ||||||
|  |     ATTR_BACKUP, | ||||||
|  |     ATTR_CODENOTARY, | ||||||
|  |     ATTR_PATH, | ||||||
|  |     ATTR_READ_ONLY, | ||||||
|  |     AddonBackupMode, | ||||||
|  |     MappingType, | ||||||
|  | ) | ||||||
| from .options import AddonOptions, UiOptions | from .options import AddonOptions, UiOptions | ||||||
| from .validate import RE_SERVICE, RE_VOLUME | from .validate import RE_SERVICE | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| @@ -537,14 +547,13 @@ class AddonModel(JobGroup, ABC): | |||||||
|         return ATTR_IMAGE not in self.data |         return ATTR_IMAGE not in self.data | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def map_volumes(self) -> dict[str, bool]: |     def map_volumes(self) -> dict[MappingType, FolderMapping]: | ||||||
|         """Return a dict of {volume: read-only} from add-on.""" |         """Return a dict of {MappingType: FolderMapping} from add-on.""" | ||||||
|         volumes = {} |         volumes = {} | ||||||
|         for volume in self.data[ATTR_MAP]: |         for volume in self.data[ATTR_MAP]: | ||||||
|             result = RE_VOLUME.match(volume) |             volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping( | ||||||
|             if not result: |                 volume.get(ATTR_PATH), volume[ATTR_READ_ONLY] | ||||||
|                 continue |             ) | ||||||
|             volumes[result.group(1)] = result.group(2) != "rw" |  | ||||||
|  |  | ||||||
|         return volumes |         return volumes | ||||||
|  |  | ||||||
| @@ -645,7 +654,9 @@ class AddonModel(JobGroup, ABC): | |||||||
|         # Home Assistant |         # Home Assistant | ||||||
|         version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT) |         version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT) | ||||||
|         with suppress(AwesomeVersionException, TypeError): |         with suppress(AwesomeVersionException, TypeError): | ||||||
|             if self.sys_homeassistant.version < version: |             if version and not version_is_new_enough( | ||||||
|  |                 self.sys_homeassistant.version, version | ||||||
|  |             ): | ||||||
|                 raise AddonsNotSupportedError( |                 raise AddonsNotSupportedError( | ||||||
|                     f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater", |                     f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater", | ||||||
|                     logger, |                     logger, | ||||||
| @@ -669,19 +680,3 @@ class AddonModel(JobGroup, ABC): | |||||||
|  |  | ||||||
|         # local build |         # local build | ||||||
|         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}" |         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}" | ||||||
|  |  | ||||||
|     def install(self) -> Awaitable[None]: |  | ||||||
|         """Install this add-on.""" |  | ||||||
|         return self.sys_addons.install(self.slug) |  | ||||||
|  |  | ||||||
|     def uninstall(self) -> Awaitable[None]: |  | ||||||
|         """Uninstall this add-on.""" |  | ||||||
|         return self.sys_addons.uninstall(self.slug) |  | ||||||
|  |  | ||||||
|     def update(self, backup: bool | None = False) -> Awaitable[Awaitable[None] | None]: |  | ||||||
|         """Update this add-on.""" |  | ||||||
|         return self.sys_addons.update(self.slug, backup=backup) |  | ||||||
|  |  | ||||||
|     def rebuild(self) -> Awaitable[Awaitable[None] | None]: |  | ||||||
|         """Rebuild this add-on.""" |  | ||||||
|         return self.sys_addons.rebuild(self.slug) |  | ||||||
|   | |||||||
| @@ -81,6 +81,7 @@ from ..const import ( | |||||||
|     ATTR_TIMEOUT, |     ATTR_TIMEOUT, | ||||||
|     ATTR_TMPFS, |     ATTR_TMPFS, | ||||||
|     ATTR_TRANSLATIONS, |     ATTR_TRANSLATIONS, | ||||||
|  |     ATTR_TYPE, | ||||||
|     ATTR_UART, |     ATTR_UART, | ||||||
|     ATTR_UDEV, |     ATTR_UDEV, | ||||||
|     ATTR_URL, |     ATTR_URL, | ||||||
| @@ -109,12 +110,22 @@ from ..validate import ( | |||||||
|     uuid_match, |     uuid_match, | ||||||
|     version_tag, |     version_tag, | ||||||
| ) | ) | ||||||
| from .const import ATTR_BACKUP, ATTR_CODENOTARY, RE_SLUG, AddonBackupMode | from .const import ( | ||||||
|  |     ATTR_BACKUP, | ||||||
|  |     ATTR_CODENOTARY, | ||||||
|  |     ATTR_PATH, | ||||||
|  |     ATTR_READ_ONLY, | ||||||
|  |     RE_SLUG, | ||||||
|  |     AddonBackupMode, | ||||||
|  |     MappingType, | ||||||
|  | ) | ||||||
| from .options import RE_SCHEMA_ELEMENT | from .options import RE_SCHEMA_ELEMENT | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$") | RE_VOLUME = re.compile( | ||||||
|  |     r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$" | ||||||
|  | ) | ||||||
| RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$") | RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$") | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -143,6 +154,7 @@ RE_MACHINE = re.compile( | |||||||
|     r"|raspberrypi3" |     r"|raspberrypi3" | ||||||
|     r"|raspberrypi4-64" |     r"|raspberrypi4-64" | ||||||
|     r"|raspberrypi4" |     r"|raspberrypi4" | ||||||
|  |     r"|raspberrypi5-64" | ||||||
|     r"|yellow" |     r"|yellow" | ||||||
|     r"|green" |     r"|green" | ||||||
|     r"|tinker" |     r"|tinker" | ||||||
| @@ -260,6 +272,48 @@ def _migrate_addon_config(protocol=False): | |||||||
|                     name, |                     name, | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|  |         # 2023-11 "map" entries can also be dict to allow path configuration | ||||||
|  |         volumes = [] | ||||||
|  |         for entry in config.get(ATTR_MAP, []): | ||||||
|  |             if isinstance(entry, dict): | ||||||
|  |                 volumes.append(entry) | ||||||
|  |             if isinstance(entry, str): | ||||||
|  |                 result = RE_VOLUME.match(entry) | ||||||
|  |                 if not result: | ||||||
|  |                     continue | ||||||
|  |                 volumes.append( | ||||||
|  |                     { | ||||||
|  |                         ATTR_TYPE: result.group(1), | ||||||
|  |                         ATTR_READ_ONLY: result.group(2) != "rw", | ||||||
|  |                     } | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |         if volumes: | ||||||
|  |             config[ATTR_MAP] = volumes | ||||||
|  |  | ||||||
|  |         # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config | ||||||
|  |         if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes): | ||||||
|  |             if any( | ||||||
|  |                 volume | ||||||
|  |                 and volume[ATTR_TYPE] | ||||||
|  |                 in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG} | ||||||
|  |                 for volume in volumes | ||||||
|  |             ): | ||||||
|  |                 _LOGGER.warning( | ||||||
|  |                     "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s", | ||||||
|  |                     MappingType.ADDON_CONFIG, | ||||||
|  |                     MappingType.HOMEASSISTANT_CONFIG, | ||||||
|  |                     MappingType.CONFIG, | ||||||
|  |                     name, | ||||||
|  |                 ) | ||||||
|  |             else: | ||||||
|  |                 _LOGGER.debug( | ||||||
|  |                     "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s", | ||||||
|  |                     MappingType.CONFIG, | ||||||
|  |                     MappingType.HOMEASSISTANT_CONFIG, | ||||||
|  |                     name, | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|         return config |         return config | ||||||
|  |  | ||||||
|     return _migrate |     return _migrate | ||||||
| @@ -308,7 +362,15 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( | |||||||
|         vol.Optional(ATTR_DEVICES): [str], |         vol.Optional(ATTR_DEVICES): [str], | ||||||
|         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), |         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), | ||||||
|         vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(), |         vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(), | ||||||
|         vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)], |         vol.Optional(ATTR_MAP, default=list): [ | ||||||
|  |             vol.Schema( | ||||||
|  |                 { | ||||||
|  |                     vol.Required(ATTR_TYPE): vol.Coerce(MappingType), | ||||||
|  |                     vol.Optional(ATTR_READ_ONLY, default=True): bool, | ||||||
|  |                     vol.Optional(ATTR_PATH): str, | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         ], | ||||||
|         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str}, |         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str}, | ||||||
|         vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)], |         vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)], | ||||||
|         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), |         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ from pathlib import Path | |||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from aiohttp import web | from aiohttp import web | ||||||
|  | from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher | ||||||
|  |  | ||||||
| from ..const import AddonState | from ..const import AddonState | ||||||
| from ..coresys import CoreSys, CoreSysAttributes | from ..coresys import CoreSys, CoreSysAttributes | ||||||
| @@ -64,9 +65,10 @@ class RestAPI(CoreSysAttributes): | |||||||
|                 "max_field_size": MAX_LINE_SIZE, |                 "max_field_size": MAX_LINE_SIZE, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |         attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher()) | ||||||
|  |  | ||||||
|         # service stuff |         # service stuff | ||||||
|         self._runner: web.AppRunner = web.AppRunner(self.webapp) |         self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5) | ||||||
|         self._site: web.TCPSite | None = None |         self._site: web.TCPSite | None = None | ||||||
|  |  | ||||||
|     async def load(self) -> None: |     async def load(self) -> None: | ||||||
| @@ -186,6 +188,8 @@ class RestAPI(CoreSysAttributes): | |||||||
|         # Boards endpoints |         # Boards endpoints | ||||||
|         self.webapp.add_routes( |         self.webapp.add_routes( | ||||||
|             [ |             [ | ||||||
|  |                 web.get("/os/boards/green", api_os.boards_green_info), | ||||||
|  |                 web.post("/os/boards/green", api_os.boards_green_options), | ||||||
|                 web.get("/os/boards/yellow", api_os.boards_yellow_info), |                 web.get("/os/boards/yellow", api_os.boards_yellow_info), | ||||||
|                 web.post("/os/boards/yellow", api_os.boards_yellow_options), |                 web.post("/os/boards/yellow", api_os.boards_yellow_options), | ||||||
|                 web.get("/os/boards/{board}", api_os.boards_other_info), |                 web.get("/os/boards/{board}", api_os.boards_other_info), | ||||||
| @@ -669,9 +673,7 @@ class RestAPI(CoreSysAttributes): | |||||||
|     async def start(self) -> None: |     async def start(self) -> None: | ||||||
|         """Run RESTful API webserver.""" |         """Run RESTful API webserver.""" | ||||||
|         await self._runner.setup() |         await self._runner.setup() | ||||||
|         self._site = web.TCPSite( |         self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80) | ||||||
|             self._runner, host="0.0.0.0", port=80, shutdown_timeout=5 |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             await self._site.start() |             await self._site.start() | ||||||
|   | |||||||
| @@ -8,8 +8,8 @@ from aiohttp import web | |||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
| from voluptuous.humanize import humanize_error | from voluptuous.humanize import humanize_error | ||||||
|  |  | ||||||
| from ..addons import AnyAddon |  | ||||||
| from ..addons.addon import Addon | from ..addons.addon import Addon | ||||||
|  | from ..addons.manager import AnyAddon | ||||||
| from ..addons.utils import rating_security | from ..addons.utils import rating_security | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     ATTR_ADDONS, |     ATTR_ADDONS, | ||||||
| @@ -388,7 +388,7 @@ class APIAddons(CoreSysAttributes): | |||||||
|     def uninstall(self, request: web.Request) -> Awaitable[None]: |     def uninstall(self, request: web.Request) -> Awaitable[None]: | ||||||
|         """Uninstall add-on.""" |         """Uninstall add-on.""" | ||||||
|         addon = self._extract_addon(request) |         addon = self._extract_addon(request) | ||||||
|         return asyncio.shield(addon.uninstall()) |         return asyncio.shield(self.sys_addons.uninstall(addon.slug)) | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
|     async def start(self, request: web.Request) -> None: |     async def start(self, request: web.Request) -> None: | ||||||
| @@ -414,7 +414,7 @@ class APIAddons(CoreSysAttributes): | |||||||
|     async def rebuild(self, request: web.Request) -> None: |     async def rebuild(self, request: web.Request) -> None: | ||||||
|         """Rebuild local build add-on.""" |         """Rebuild local build add-on.""" | ||||||
|         addon = self._extract_addon(request) |         addon = self._extract_addon(request) | ||||||
|         if start_task := await asyncio.shield(addon.rebuild()): |         if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)): | ||||||
|             await start_task |             await start_task | ||||||
|  |  | ||||||
|     @api_process_raw(CONTENT_TYPE_BINARY) |     @api_process_raw(CONTENT_TYPE_BINARY) | ||||||
|   | |||||||
| @@ -11,6 +11,7 @@ from ..addons.addon import Addon | |||||||
| from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM | from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM | ||||||
| from ..coresys import CoreSysAttributes | from ..coresys import CoreSysAttributes | ||||||
| from ..exceptions import APIForbidden | from ..exceptions import APIForbidden | ||||||
|  | from ..utils.json import json_loads | ||||||
| from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL | from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL | ||||||
| from .utils import api_process, api_validate | from .utils import api_process, api_validate | ||||||
|  |  | ||||||
| @@ -67,7 +68,7 @@ class APIAuth(CoreSysAttributes): | |||||||
|  |  | ||||||
|         # Json |         # Json | ||||||
|         if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON: |         if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON: | ||||||
|             data = await request.json() |             data = await request.json(loads=json_loads) | ||||||
|             return await self._process_dict(request, addon, data) |             return await self._process_dict(request, addon, data) | ||||||
|  |  | ||||||
|         # URL encoded |         # URL encoded | ||||||
|   | |||||||
| @@ -1,5 +1,6 @@ | |||||||
| """Backups RESTful API.""" | """Backups RESTful API.""" | ||||||
| import asyncio | import asyncio | ||||||
|  | import errno | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import re | import re | ||||||
| @@ -20,6 +21,7 @@ from ..const import ( | |||||||
|     ATTR_DAYS_UNTIL_STALE, |     ATTR_DAYS_UNTIL_STALE, | ||||||
|     ATTR_FOLDERS, |     ATTR_FOLDERS, | ||||||
|     ATTR_HOMEASSISTANT, |     ATTR_HOMEASSISTANT, | ||||||
|  |     ATTR_HOMEASSISTANT_EXCLUDE_DATABASE, | ||||||
|     ATTR_LOCATON, |     ATTR_LOCATON, | ||||||
|     ATTR_NAME, |     ATTR_NAME, | ||||||
|     ATTR_PASSWORD, |     ATTR_PASSWORD, | ||||||
| @@ -35,6 +37,7 @@ from ..const import ( | |||||||
| from ..coresys import CoreSysAttributes | from ..coresys import CoreSysAttributes | ||||||
| from ..exceptions import APIError | from ..exceptions import APIError | ||||||
| from ..mounts.const import MountUsage | from ..mounts.const import MountUsage | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
| from .const import CONTENT_TYPE_TAR | from .const import CONTENT_TYPE_TAR | ||||||
| from .utils import api_process, api_validate | from .utils import api_process, api_validate | ||||||
|  |  | ||||||
| @@ -64,6 +67,7 @@ SCHEMA_BACKUP_FULL = vol.Schema( | |||||||
|         vol.Optional(ATTR_PASSWORD): vol.Maybe(str), |         vol.Optional(ATTR_PASSWORD): vol.Maybe(str), | ||||||
|         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()), |         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()), | ||||||
|         vol.Optional(ATTR_LOCATON): vol.Maybe(str), |         vol.Optional(ATTR_LOCATON): vol.Maybe(str), | ||||||
|  |         vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(), | ||||||
|     } |     } | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -184,6 +188,7 @@ class APIBackups(CoreSysAttributes): | |||||||
|             ATTR_ADDONS: data_addons, |             ATTR_ADDONS: data_addons, | ||||||
|             ATTR_REPOSITORIES: backup.repositories, |             ATTR_REPOSITORIES: backup.repositories, | ||||||
|             ATTR_FOLDERS: backup.folders, |             ATTR_FOLDERS: backup.folders, | ||||||
|  |             ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database, | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]: |     def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]: | ||||||
| @@ -285,6 +290,8 @@ class APIBackups(CoreSysAttributes): | |||||||
|                         backup.write(chunk) |                         backup.write(chunk) | ||||||
|  |  | ||||||
|             except OSError as err: |             except OSError as err: | ||||||
|  |                 if err.errno == errno.EBADMSG: | ||||||
|  |                     self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|                 _LOGGER.error("Can't write new backup file: %s", err) |                 _LOGGER.error("Can't write new backup file: %s", err) | ||||||
|                 return False |                 return False | ||||||
|  |  | ||||||
|   | |||||||
| @@ -23,7 +23,6 @@ ATTR_CONNECTION_BUS = "connection_bus" | |||||||
| ATTR_DATA_DISK = "data_disk" | ATTR_DATA_DISK = "data_disk" | ||||||
| ATTR_DEVICE = "device" | ATTR_DEVICE = "device" | ||||||
| ATTR_DEV_PATH = "dev_path" | ATTR_DEV_PATH = "dev_path" | ||||||
| ATTR_DISK_LED = "disk_led" |  | ||||||
| ATTR_DISKS = "disks" | ATTR_DISKS = "disks" | ||||||
| ATTR_DRIVES = "drives" | ATTR_DRIVES = "drives" | ||||||
| ATTR_DT_SYNCHRONIZED = "dt_synchronized" | ATTR_DT_SYNCHRONIZED = "dt_synchronized" | ||||||
| @@ -31,7 +30,6 @@ ATTR_DT_UTC = "dt_utc" | |||||||
| ATTR_EJECTABLE = "ejectable" | ATTR_EJECTABLE = "ejectable" | ||||||
| ATTR_FALLBACK = "fallback" | ATTR_FALLBACK = "fallback" | ||||||
| ATTR_FILESYSTEMS = "filesystems" | ATTR_FILESYSTEMS = "filesystems" | ||||||
| ATTR_HEARTBEAT_LED = "heartbeat_led" |  | ||||||
| ATTR_IDENTIFIERS = "identifiers" | ATTR_IDENTIFIERS = "identifiers" | ||||||
| ATTR_JOBS = "jobs" | ATTR_JOBS = "jobs" | ||||||
| ATTR_LLMNR = "llmnr" | ATTR_LLMNR = "llmnr" | ||||||
| @@ -41,7 +39,6 @@ ATTR_MODEL = "model" | |||||||
| ATTR_MOUNTS = "mounts" | ATTR_MOUNTS = "mounts" | ||||||
| ATTR_MOUNT_POINTS = "mount_points" | ATTR_MOUNT_POINTS = "mount_points" | ||||||
| ATTR_PANEL_PATH = "panel_path" | ATTR_PANEL_PATH = "panel_path" | ||||||
| ATTR_POWER_LED = "power_led" |  | ||||||
| ATTR_REMOVABLE = "removable" | ATTR_REMOVABLE = "removable" | ||||||
| ATTR_REVISION = "revision" | ATTR_REVISION = "revision" | ||||||
| ATTR_SEAT = "seat" | ATTR_SEAT = "seat" | ||||||
| @@ -49,6 +46,7 @@ ATTR_SIGNED = "signed" | |||||||
| ATTR_STARTUP_TIME = "startup_time" | ATTR_STARTUP_TIME = "startup_time" | ||||||
| ATTR_SUBSYSTEM = "subsystem" | ATTR_SUBSYSTEM = "subsystem" | ||||||
| ATTR_SYSFS = "sysfs" | ATTR_SYSFS = "sysfs" | ||||||
|  | ATTR_SYSTEM_HEALTH_LED = "system_health_led" | ||||||
| ATTR_TIME_DETECTED = "time_detected" | ATTR_TIME_DETECTED = "time_detected" | ||||||
| ATTR_UPDATE_TYPE = "update_type" | ATTR_UPDATE_TYPE = "update_type" | ||||||
| ATTR_USE_NTP = "use_ntp" | ATTR_USE_NTP = "use_ntp" | ||||||
|   | |||||||
| @@ -12,6 +12,7 @@ from ..const import ( | |||||||
|     ATTR_AUDIO_INPUT, |     ATTR_AUDIO_INPUT, | ||||||
|     ATTR_AUDIO_OUTPUT, |     ATTR_AUDIO_OUTPUT, | ||||||
|     ATTR_BACKUP, |     ATTR_BACKUP, | ||||||
|  |     ATTR_BACKUPS_EXCLUDE_DATABASE, | ||||||
|     ATTR_BLK_READ, |     ATTR_BLK_READ, | ||||||
|     ATTR_BLK_WRITE, |     ATTR_BLK_WRITE, | ||||||
|     ATTR_BOOT, |     ATTR_BOOT, | ||||||
| @@ -51,6 +52,7 @@ SCHEMA_OPTIONS = vol.Schema( | |||||||
|         vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str), |         vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str), | ||||||
|         vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str), |         vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str), | ||||||
|         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str), |         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str), | ||||||
|  |         vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(), | ||||||
|     } |     } | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -82,6 +84,7 @@ class APIHomeAssistant(CoreSysAttributes): | |||||||
|             ATTR_WATCHDOG: self.sys_homeassistant.watchdog, |             ATTR_WATCHDOG: self.sys_homeassistant.watchdog, | ||||||
|             ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input, |             ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input, | ||||||
|             ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output, |             ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output, | ||||||
|  |             ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database, | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
| @@ -113,6 +116,11 @@ class APIHomeAssistant(CoreSysAttributes): | |||||||
|         if ATTR_AUDIO_OUTPUT in body: |         if ATTR_AUDIO_OUTPUT in body: | ||||||
|             self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT] |             self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT] | ||||||
|  |  | ||||||
|  |         if ATTR_BACKUPS_EXCLUDE_DATABASE in body: | ||||||
|  |             self.sys_homeassistant.backups_exclude_database = body[ | ||||||
|  |                 ATTR_BACKUPS_EXCLUDE_DATABASE | ||||||
|  |             ] | ||||||
|  |  | ||||||
|         self.sys_homeassistant.save_data() |         self.sys_homeassistant.save_data() | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
|   | |||||||
| @@ -48,6 +48,29 @@ SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema( | |||||||
| ) | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | # from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1 | ||||||
|  | def must_be_empty_body(method: str, code: int) -> bool: | ||||||
|  |     """Check if a request must return an empty body.""" | ||||||
|  |     return ( | ||||||
|  |         status_code_must_be_empty_body(code) | ||||||
|  |         or method_must_be_empty_body(method) | ||||||
|  |         or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def method_must_be_empty_body(method: str) -> bool: | ||||||
|  |     """Check if a method must return an empty body.""" | ||||||
|  |     # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 | ||||||
|  |     # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 | ||||||
|  |     return method.upper() == hdrs.METH_HEAD | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def status_code_must_be_empty_body(code: int) -> bool: | ||||||
|  |     """Check if a status code must return an empty body.""" | ||||||
|  |     # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 | ||||||
|  |     return code in {204, 304} or 100 <= code < 200 | ||||||
|  |  | ||||||
|  |  | ||||||
| class APIIngress(CoreSysAttributes): | class APIIngress(CoreSysAttributes): | ||||||
|     """Ingress view to handle add-on webui routing.""" |     """Ingress view to handle add-on webui routing.""" | ||||||
|  |  | ||||||
| @@ -225,10 +248,18 @@ class APIIngress(CoreSysAttributes): | |||||||
|             skip_auto_headers={hdrs.CONTENT_TYPE}, |             skip_auto_headers={hdrs.CONTENT_TYPE}, | ||||||
|         ) as result: |         ) as result: | ||||||
|             headers = _response_header(result) |             headers = _response_header(result) | ||||||
|  |             # Avoid parsing content_type in simple cases for better performance | ||||||
|  |             if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE): | ||||||
|  |                 content_type = (maybe_content_type.partition(";"))[0].strip() | ||||||
|  |             else: | ||||||
|  |                 content_type = result.content_type | ||||||
|             # Simple request |             # Simple request | ||||||
|             if ( |             if ( | ||||||
|                 hdrs.CONTENT_LENGTH in result.headers |                 # empty body responses should not be streamed, | ||||||
|  |                 # otherwise aiohttp < 3.9.0 may generate | ||||||
|  |                 # an invalid "0\r\n\r\n" chunk instead of an empty response. | ||||||
|  |                 must_be_empty_body(request.method, result.status) | ||||||
|  |                 or hdrs.CONTENT_LENGTH in result.headers | ||||||
|                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000 |                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000 | ||||||
|             ): |             ): | ||||||
|                 # Return Response |                 # Return Response | ||||||
| @@ -236,13 +267,13 @@ class APIIngress(CoreSysAttributes): | |||||||
|                 return web.Response( |                 return web.Response( | ||||||
|                     headers=headers, |                     headers=headers, | ||||||
|                     status=result.status, |                     status=result.status, | ||||||
|                     content_type=result.content_type, |                     content_type=content_type, | ||||||
|                     body=body, |                     body=body, | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|             # Stream response |             # Stream response | ||||||
|             response = web.StreamResponse(status=result.status, headers=headers) |             response = web.StreamResponse(status=result.status, headers=headers) | ||||||
|             response.content_type = result.content_type |             response.content_type = content_type | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 await response.prepare(request) |                 await response.prepare(request) | ||||||
|   | |||||||
| @@ -19,6 +19,7 @@ from ...const import ( | |||||||
|     CoreState, |     CoreState, | ||||||
| ) | ) | ||||||
| from ...coresys import CoreSys, CoreSysAttributes | from ...coresys import CoreSys, CoreSysAttributes | ||||||
|  | from ...utils import version_is_new_enough | ||||||
| from ..utils import api_return_error, excract_supervisor_token | from ..utils import api_return_error, excract_supervisor_token | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
| @@ -273,9 +274,8 @@ class SecurityMiddleware(CoreSysAttributes): | |||||||
|     @middleware |     @middleware | ||||||
|     async def core_proxy(self, request: Request, handler: RequestHandler) -> Response: |     async def core_proxy(self, request: Request, handler: RequestHandler) -> Response: | ||||||
|         """Validate user from Core API proxy.""" |         """Validate user from Core API proxy.""" | ||||||
|         if ( |         if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough( | ||||||
|             request[REQUEST_FROM] != self.sys_homeassistant |             self.sys_homeassistant.version, _CORE_VERSION | ||||||
|             or self.sys_homeassistant.version >= _CORE_VERSION |  | ||||||
|         ): |         ): | ||||||
|             return await handler(request) |             return await handler(request) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -8,11 +8,15 @@ from aiohttp import web | |||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from ..const import ( | from ..const import ( | ||||||
|  |     ATTR_ACTIVITY_LED, | ||||||
|     ATTR_BOARD, |     ATTR_BOARD, | ||||||
|     ATTR_BOOT, |     ATTR_BOOT, | ||||||
|     ATTR_DEVICES, |     ATTR_DEVICES, | ||||||
|  |     ATTR_DISK_LED, | ||||||
|  |     ATTR_HEARTBEAT_LED, | ||||||
|     ATTR_ID, |     ATTR_ID, | ||||||
|     ATTR_NAME, |     ATTR_NAME, | ||||||
|  |     ATTR_POWER_LED, | ||||||
|     ATTR_SERIAL, |     ATTR_SERIAL, | ||||||
|     ATTR_SIZE, |     ATTR_SIZE, | ||||||
|     ATTR_UPDATE_AVAILABLE, |     ATTR_UPDATE_AVAILABLE, | ||||||
| @@ -27,21 +31,19 @@ from .const import ( | |||||||
|     ATTR_DATA_DISK, |     ATTR_DATA_DISK, | ||||||
|     ATTR_DEV_PATH, |     ATTR_DEV_PATH, | ||||||
|     ATTR_DEVICE, |     ATTR_DEVICE, | ||||||
|     ATTR_DISK_LED, |  | ||||||
|     ATTR_DISKS, |     ATTR_DISKS, | ||||||
|     ATTR_HEARTBEAT_LED, |  | ||||||
|     ATTR_MODEL, |     ATTR_MODEL, | ||||||
|     ATTR_POWER_LED, |     ATTR_SYSTEM_HEALTH_LED, | ||||||
|     ATTR_VENDOR, |     ATTR_VENDOR, | ||||||
| ) | ) | ||||||
| from .utils import api_process, api_validate | from .utils import api_process, api_validate | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | # pylint: disable=no-value-for-parameter | ||||||
| SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag}) | SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag}) | ||||||
| SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str}) | SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str}) | ||||||
|  |  | ||||||
| # pylint: disable=no-value-for-parameter |  | ||||||
| SCHEMA_YELLOW_OPTIONS = vol.Schema( | SCHEMA_YELLOW_OPTIONS = vol.Schema( | ||||||
|     { |     { | ||||||
|         vol.Optional(ATTR_DISK_LED): vol.Boolean(), |         vol.Optional(ATTR_DISK_LED): vol.Boolean(), | ||||||
| @@ -49,6 +51,14 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema( | |||||||
|         vol.Optional(ATTR_POWER_LED): vol.Boolean(), |         vol.Optional(ATTR_POWER_LED): vol.Boolean(), | ||||||
|     } |     } | ||||||
| ) | ) | ||||||
|  | SCHEMA_GREEN_OPTIONS = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_POWER_LED): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(), | ||||||
|  |     } | ||||||
|  | ) | ||||||
|  | # pylint: enable=no-value-for-parameter | ||||||
|  |  | ||||||
|  |  | ||||||
| class APIOS(CoreSysAttributes): | class APIOS(CoreSysAttributes): | ||||||
| @@ -105,6 +115,31 @@ class APIOS(CoreSysAttributes): | |||||||
|             ], |             ], | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |     @api_process | ||||||
|  |     async def boards_green_info(self, request: web.Request) -> dict[str, Any]: | ||||||
|  |         """Get green board settings.""" | ||||||
|  |         return { | ||||||
|  |             ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led, | ||||||
|  |             ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led, | ||||||
|  |             ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led, | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     @api_process | ||||||
|  |     async def boards_green_options(self, request: web.Request) -> None: | ||||||
|  |         """Update green board settings.""" | ||||||
|  |         body = await api_validate(SCHEMA_GREEN_OPTIONS, request) | ||||||
|  |  | ||||||
|  |         if ATTR_ACTIVITY_LED in body: | ||||||
|  |             self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED] | ||||||
|  |  | ||||||
|  |         if ATTR_POWER_LED in body: | ||||||
|  |             self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED] | ||||||
|  |  | ||||||
|  |         if ATTR_SYSTEM_HEALTH_LED in body: | ||||||
|  |             self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED] | ||||||
|  |  | ||||||
|  |         self.sys_dbus.agent.board.green.save_data() | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
|     async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]: |     async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]: | ||||||
|         """Get yellow board settings.""" |         """Get yellow board settings.""" | ||||||
| @@ -128,6 +163,7 @@ class APIOS(CoreSysAttributes): | |||||||
|         if ATTR_POWER_LED in body: |         if ATTR_POWER_LED in body: | ||||||
|             self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED] |             self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED] | ||||||
|  |  | ||||||
|  |         self.sys_dbus.agent.board.yellow.save_data() | ||||||
|         self.sys_resolution.create_issue( |         self.sys_resolution.create_issue( | ||||||
|             IssueType.REBOOT_REQUIRED, |             IssueType.REBOOT_REQUIRED, | ||||||
|             ContextType.SYSTEM, |             ContextType.SYSTEM, | ||||||
|   | |||||||
| @@ -6,7 +6,10 @@ import logging | |||||||
| import aiohttp | import aiohttp | ||||||
| from aiohttp import web | from aiohttp import web | ||||||
| from aiohttp.client_exceptions import ClientConnectorError | from aiohttp.client_exceptions import ClientConnectorError | ||||||
|  | from aiohttp.client_ws import ClientWebSocketResponse | ||||||
| from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE | from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE | ||||||
|  | from aiohttp.http import WSMessage | ||||||
|  | from aiohttp.http_websocket import WSMsgType | ||||||
| from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized | from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized | ||||||
|  |  | ||||||
| from ..coresys import CoreSysAttributes | from ..coresys import CoreSysAttributes | ||||||
| @@ -74,7 +77,7 @@ class APIProxy(CoreSysAttributes): | |||||||
|             _LOGGER.error("Error on API for request %s", path) |             _LOGGER.error("Error on API for request %s", path) | ||||||
|         except aiohttp.ClientError as err: |         except aiohttp.ClientError as err: | ||||||
|             _LOGGER.error("Client error on API %s request %s", path, err) |             _LOGGER.error("Client error on API %s request %s", path, err) | ||||||
|         except asyncio.TimeoutError: |         except TimeoutError: | ||||||
|             _LOGGER.error("Client timeout error on API request %s", path) |             _LOGGER.error("Client timeout error on API request %s", path) | ||||||
|  |  | ||||||
|         raise HTTPBadGateway() |         raise HTTPBadGateway() | ||||||
| @@ -114,7 +117,7 @@ class APIProxy(CoreSysAttributes): | |||||||
|                 body=data, status=client.status, content_type=client.content_type |                 body=data, status=client.status, content_type=client.content_type | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|     async def _websocket_client(self): |     async def _websocket_client(self) -> ClientWebSocketResponse: | ||||||
|         """Initialize a WebSocket API connection.""" |         """Initialize a WebSocket API connection.""" | ||||||
|         url = f"{self.sys_homeassistant.api_url}/api/websocket" |         url = f"{self.sys_homeassistant.api_url}/api/websocket" | ||||||
|  |  | ||||||
| @@ -167,6 +170,25 @@ class APIProxy(CoreSysAttributes): | |||||||
|  |  | ||||||
|         raise APIError() |         raise APIError() | ||||||
|  |  | ||||||
|  |     async def _proxy_message( | ||||||
|  |         self, | ||||||
|  |         read_task: asyncio.Task, | ||||||
|  |         target: web.WebSocketResponse | ClientWebSocketResponse, | ||||||
|  |     ) -> None: | ||||||
|  |         """Proxy a message from client to server or vice versa.""" | ||||||
|  |         if read_task.exception(): | ||||||
|  |             raise read_task.exception() | ||||||
|  |  | ||||||
|  |         msg: WSMessage = read_task.result() | ||||||
|  |         if msg.type == WSMsgType.TEXT: | ||||||
|  |             return await target.send_str(msg.data) | ||||||
|  |         if msg.type == WSMsgType.BINARY: | ||||||
|  |             return await target.send_bytes(msg.data) | ||||||
|  |  | ||||||
|  |         raise TypeError( | ||||||
|  |             f"Cannot proxy websocket message of unsupported type: {msg.type}" | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     async def websocket(self, request: web.Request): |     async def websocket(self, request: web.Request): | ||||||
|         """Initialize a WebSocket API connection.""" |         """Initialize a WebSocket API connection.""" | ||||||
|         if not await self.sys_homeassistant.api.check_api_state(): |         if not await self.sys_homeassistant.api.check_api_state(): | ||||||
| @@ -214,13 +236,13 @@ class APIProxy(CoreSysAttributes): | |||||||
|  |  | ||||||
|         _LOGGER.info("Home Assistant WebSocket API request running") |         _LOGGER.info("Home Assistant WebSocket API request running") | ||||||
|         try: |         try: | ||||||
|             client_read = None |             client_read: asyncio.Task | None = None | ||||||
|             server_read = None |             server_read: asyncio.Task | None = None | ||||||
|             while not server.closed and not client.closed: |             while not server.closed and not client.closed: | ||||||
|                 if not client_read: |                 if not client_read: | ||||||
|                     client_read = self.sys_create_task(client.receive_str()) |                     client_read = self.sys_create_task(client.receive()) | ||||||
|                 if not server_read: |                 if not server_read: | ||||||
|                     server_read = self.sys_create_task(server.receive_str()) |                     server_read = self.sys_create_task(server.receive()) | ||||||
|  |  | ||||||
|                 # wait until data need to be processed |                 # wait until data need to be processed | ||||||
|                 await asyncio.wait( |                 await asyncio.wait( | ||||||
| @@ -229,14 +251,12 @@ class APIProxy(CoreSysAttributes): | |||||||
|  |  | ||||||
|                 # server |                 # server | ||||||
|                 if server_read.done() and not client.closed: |                 if server_read.done() and not client.closed: | ||||||
|                     server_read.exception() |                     await self._proxy_message(server_read, client) | ||||||
|                     await client.send_str(server_read.result()) |  | ||||||
|                     server_read = None |                     server_read = None | ||||||
|  |  | ||||||
|                 # client |                 # client | ||||||
|                 if client_read.done() and not server.closed: |                 if client_read.done() and not server.closed: | ||||||
|                     client_read.exception() |                     await self._proxy_message(client_read, server) | ||||||
|                     await server.send_str(client_read.result()) |  | ||||||
|                     client_read = None |                     client_read = None | ||||||
|  |  | ||||||
|         except asyncio.CancelledError: |         except asyncio.CancelledError: | ||||||
| @@ -246,9 +266,9 @@ class APIProxy(CoreSysAttributes): | |||||||
|             _LOGGER.info("Home Assistant WebSocket API error: %s", err) |             _LOGGER.info("Home Assistant WebSocket API error: %s", err) | ||||||
|  |  | ||||||
|         finally: |         finally: | ||||||
|             if client_read: |             if client_read and not client_read.done(): | ||||||
|                 client_read.cancel() |                 client_read.cancel() | ||||||
|             if server_read: |             if server_read and not server_read.done(): | ||||||
|                 server_read.cancel() |                 server_read.cancel() | ||||||
|  |  | ||||||
|             # close connections |             # close connections | ||||||
|   | |||||||
| @@ -6,7 +6,7 @@ from typing import Any | |||||||
| from aiohttp import web | from aiohttp import web | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from ..addons import AnyAddon | from ..addons.manager import AnyAddon | ||||||
| from ..addons.utils import rating_security | from ..addons.utils import rating_security | ||||||
| from ..api.const import ATTR_SIGNED | from ..api.const import ATTR_SIGNED | ||||||
| from ..api.utils import api_process, api_process_raw, api_validate | from ..api.utils import api_process, api_process_raw, api_validate | ||||||
| @@ -186,18 +186,20 @@ class APIStore(CoreSysAttributes): | |||||||
|         } |         } | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
|     async def addons_list(self, request: web.Request) -> list[dict[str, Any]]: |     async def addons_list(self, request: web.Request) -> dict[str, Any]: | ||||||
|         """Return all store add-ons.""" |         """Return all store add-ons.""" | ||||||
|         return [ |         return { | ||||||
|  |             ATTR_ADDONS: [ | ||||||
|                 self._generate_addon_information(self.sys_addons.store[addon]) |                 self._generate_addon_information(self.sys_addons.store[addon]) | ||||||
|                 for addon in self.sys_addons.store |                 for addon in self.sys_addons.store | ||||||
|             ] |             ] | ||||||
|  |         } | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
|     def addons_addon_install(self, request: web.Request) -> Awaitable[None]: |     def addons_addon_install(self, request: web.Request) -> Awaitable[None]: | ||||||
|         """Install add-on.""" |         """Install add-on.""" | ||||||
|         addon = self._extract_addon(request) |         addon = self._extract_addon(request) | ||||||
|         return asyncio.shield(addon.install()) |         return asyncio.shield(self.sys_addons.install(addon.slug)) | ||||||
|  |  | ||||||
|     @api_process |     @api_process | ||||||
|     async def addons_addon_update(self, request: web.Request) -> None: |     async def addons_addon_update(self, request: web.Request) -> None: | ||||||
| @@ -209,7 +211,7 @@ class APIStore(CoreSysAttributes): | |||||||
|         body = await api_validate(SCHEMA_UPDATE, request) |         body = await api_validate(SCHEMA_UPDATE, request) | ||||||
|  |  | ||||||
|         if start_task := await asyncio.shield( |         if start_task := await asyncio.shield( | ||||||
|             addon.update(backup=body.get(ATTR_BACKUP)) |             self.sys_addons.update(addon.slug, backup=body.get(ATTR_BACKUP)) | ||||||
|         ): |         ): | ||||||
|             await start_task |             await start_task | ||||||
|  |  | ||||||
|   | |||||||
| @@ -22,7 +22,7 @@ from ..const import ( | |||||||
| from ..coresys import CoreSys | from ..coresys import CoreSys | ||||||
| from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError | from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError | ||||||
| from ..utils import check_exception_chain, get_message_from_exception_chain | from ..utils import check_exception_chain, get_message_from_exception_chain | ||||||
| from ..utils.json import JSONEncoder | from ..utils.json import json_dumps, json_loads as json_loads_util | ||||||
| from ..utils.log_format import format_message | from ..utils.log_format import format_message | ||||||
| from .const import CONTENT_TYPE_BINARY | from .const import CONTENT_TYPE_BINARY | ||||||
|  |  | ||||||
| @@ -48,7 +48,7 @@ def json_loads(data: Any) -> dict[str, Any]: | |||||||
|     if not data: |     if not data: | ||||||
|         return {} |         return {} | ||||||
|     try: |     try: | ||||||
|         return json.loads(data) |         return json_loads_util(data) | ||||||
|     except json.JSONDecodeError as err: |     except json.JSONDecodeError as err: | ||||||
|         raise APIError("Invalid json") from err |         raise APIError("Invalid json") from err | ||||||
|  |  | ||||||
| @@ -130,7 +130,7 @@ def api_return_error( | |||||||
|             JSON_MESSAGE: message or "Unknown error, see supervisor", |             JSON_MESSAGE: message or "Unknown error, see supervisor", | ||||||
|         }, |         }, | ||||||
|         status=400, |         status=400, | ||||||
|         dumps=lambda x: json.dumps(x, cls=JSONEncoder), |         dumps=json_dumps, | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -138,7 +138,7 @@ def api_return_ok(data: dict[str, Any] | None = None) -> web.Response: | |||||||
|     """Return an API ok answer.""" |     """Return an API ok answer.""" | ||||||
|     return web.json_response( |     return web.json_response( | ||||||
|         {JSON_RESULT: RESULT_OK, JSON_DATA: data or {}}, |         {JSON_RESULT: RESULT_OK, JSON_DATA: data or {}}, | ||||||
|         dumps=lambda x: json.dumps(x, cls=JSONEncoder), |         dumps=json_dumps, | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -28,6 +28,7 @@ class CpuArch(CoreSysAttributes): | |||||||
|         """Initialize CPU Architecture handler.""" |         """Initialize CPU Architecture handler.""" | ||||||
|         self.coresys = coresys |         self.coresys = coresys | ||||||
|         self._supported_arch: list[str] = [] |         self._supported_arch: list[str] = [] | ||||||
|  |         self._supported_set: set[str] = set() | ||||||
|         self._default_arch: str |         self._default_arch: str | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -70,9 +71,11 @@ class CpuArch(CoreSysAttributes): | |||||||
|         if native_support not in self._supported_arch: |         if native_support not in self._supported_arch: | ||||||
|             self._supported_arch.append(native_support) |             self._supported_arch.append(native_support) | ||||||
|  |  | ||||||
|  |         self._supported_set = set(self._supported_arch) | ||||||
|  |  | ||||||
|     def is_supported(self, arch_list: list[str]) -> bool: |     def is_supported(self, arch_list: list[str]) -> bool: | ||||||
|         """Return True if there is a supported arch by this platform.""" |         """Return True if there is a supported arch by this platform.""" | ||||||
|         return not set(self.supported).isdisjoint(set(arch_list)) |         return not self._supported_set.isdisjoint(arch_list) | ||||||
|  |  | ||||||
|     def match(self, arch_list: list[str]) -> str: |     def match(self, arch_list: list[str]) -> str: | ||||||
|         """Return best match for this CPU/Platform.""" |         """Return best match for this CPU/Platform.""" | ||||||
|   | |||||||
| @@ -19,13 +19,14 @@ from securetar import SecureTarFile, atomic_contents_add, secure_path | |||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
| from voluptuous.humanize import humanize_error | from voluptuous.humanize import humanize_error | ||||||
|  |  | ||||||
| from ..addons import Addon | from ..addons.manager import Addon | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     ATTR_ADDONS, |     ATTR_ADDONS, | ||||||
|     ATTR_COMPRESSED, |     ATTR_COMPRESSED, | ||||||
|     ATTR_CRYPTO, |     ATTR_CRYPTO, | ||||||
|     ATTR_DATE, |     ATTR_DATE, | ||||||
|     ATTR_DOCKER, |     ATTR_DOCKER, | ||||||
|  |     ATTR_EXCLUDE_DATABASE, | ||||||
|     ATTR_FOLDERS, |     ATTR_FOLDERS, | ||||||
|     ATTR_HOMEASSISTANT, |     ATTR_HOMEASSISTANT, | ||||||
|     ATTR_NAME, |     ATTR_NAME, | ||||||
| @@ -130,7 +131,14 @@ class Backup(CoreSysAttributes): | |||||||
|         """Return backup Home Assistant version.""" |         """Return backup Home Assistant version.""" | ||||||
|         if self.homeassistant is None: |         if self.homeassistant is None: | ||||||
|             return None |             return None | ||||||
|         return self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] |         return self.homeassistant[ATTR_VERSION] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def homeassistant_exclude_database(self) -> bool: | ||||||
|  |         """Return whether database was excluded from Home Assistant backup.""" | ||||||
|  |         if self.homeassistant is None: | ||||||
|  |             return None | ||||||
|  |         return self.homeassistant[ATTR_EXCLUDE_DATABASE] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def homeassistant(self): |     def homeassistant(self): | ||||||
| @@ -307,7 +315,11 @@ class Backup(CoreSysAttributes): | |||||||
|         def _extract_backup(): |         def _extract_backup(): | ||||||
|             """Extract a backup.""" |             """Extract a backup.""" | ||||||
|             with tarfile.open(self.tarfile, "r:") as tar: |             with tarfile.open(self.tarfile, "r:") as tar: | ||||||
|                 tar.extractall(path=self._tmp.name, members=secure_path(tar)) |                 tar.extractall( | ||||||
|  |                     path=self._tmp.name, | ||||||
|  |                     members=secure_path(tar), | ||||||
|  |                     filter="fully_trusted", | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|         await self.sys_run_in_executor(_extract_backup) |         await self.sys_run_in_executor(_extract_backup) | ||||||
|  |  | ||||||
| @@ -341,14 +353,14 @@ class Backup(CoreSysAttributes): | |||||||
|         finally: |         finally: | ||||||
|             self._tmp.cleanup() |             self._tmp.cleanup() | ||||||
|  |  | ||||||
|     async def store_addons(self, addon_list: list[str]) -> list[Awaitable[None]]: |     async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]: | ||||||
|         """Add a list of add-ons into backup. |         """Add a list of add-ons into backup. | ||||||
|  |  | ||||||
|         For each addon that needs to be started after backup, returns a task which |         For each addon that needs to be started after backup, returns a Task which | ||||||
|         completes when that addon has state 'started' (see addon.start). |         completes when that addon has state 'started' (see addon.start). | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         async def _addon_save(addon: Addon) -> Awaitable[None] | None: |         async def _addon_save(addon: Addon) -> asyncio.Task | None: | ||||||
|             """Task to store an add-on into backup.""" |             """Task to store an add-on into backup.""" | ||||||
|             tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}" |             tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}" | ||||||
|             addon_file = SecureTarFile( |             addon_file = SecureTarFile( | ||||||
| @@ -380,7 +392,7 @@ class Backup(CoreSysAttributes): | |||||||
|  |  | ||||||
|         # Save Add-ons sequential |         # Save Add-ons sequential | ||||||
|         # avoid issue on slow IO |         # avoid issue on slow IO | ||||||
|         start_tasks: list[Awaitable[None]] = [] |         start_tasks: list[asyncio.Task] = [] | ||||||
|         for addon in addon_list: |         for addon in addon_list: | ||||||
|             try: |             try: | ||||||
|                 if start_task := await _addon_save(addon): |                 if start_task := await _addon_save(addon): | ||||||
| @@ -390,10 +402,12 @@ class Backup(CoreSysAttributes): | |||||||
|  |  | ||||||
|         return start_tasks |         return start_tasks | ||||||
|  |  | ||||||
|     async def restore_addons(self, addon_list: list[str]) -> list[Awaitable[None]]: |     async def restore_addons( | ||||||
|  |         self, addon_list: list[str] | ||||||
|  |     ) -> tuple[bool, list[asyncio.Task]]: | ||||||
|         """Restore a list add-on from backup.""" |         """Restore a list add-on from backup.""" | ||||||
|  |  | ||||||
|         async def _addon_restore(addon_slug: str) -> Awaitable[None] | None: |         async def _addon_restore(addon_slug: str) -> tuple[bool, asyncio.Task | None]: | ||||||
|             """Task to restore an add-on into backup.""" |             """Task to restore an add-on into backup.""" | ||||||
|             tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}" |             tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}" | ||||||
|             addon_file = SecureTarFile( |             addon_file = SecureTarFile( | ||||||
| @@ -407,25 +421,31 @@ class Backup(CoreSysAttributes): | |||||||
|             # If exists inside backup |             # If exists inside backup | ||||||
|             if not addon_file.path.exists(): |             if not addon_file.path.exists(): | ||||||
|                 _LOGGER.error("Can't find backup %s", addon_slug) |                 _LOGGER.error("Can't find backup %s", addon_slug) | ||||||
|                 return |                 return (False, None) | ||||||
|  |  | ||||||
|             # Perform a restore |             # Perform a restore | ||||||
|             try: |             try: | ||||||
|                 return await self.sys_addons.restore(addon_slug, addon_file) |                 return (True, await self.sys_addons.restore(addon_slug, addon_file)) | ||||||
|             except AddonsError: |             except AddonsError: | ||||||
|                 _LOGGER.error("Can't restore backup %s", addon_slug) |                 _LOGGER.error("Can't restore backup %s", addon_slug) | ||||||
|  |                 return (False, None) | ||||||
|  |  | ||||||
|         # Save Add-ons sequential |         # Save Add-ons sequential | ||||||
|         # avoid issue on slow IO |         # avoid issue on slow IO | ||||||
|         start_tasks: list[Awaitable[None]] = [] |         start_tasks: list[asyncio.Task] = [] | ||||||
|  |         success = True | ||||||
|         for slug in addon_list: |         for slug in addon_list: | ||||||
|             try: |             try: | ||||||
|                 if start_task := await _addon_restore(slug): |                 addon_success, start_task = await _addon_restore(slug) | ||||||
|                     start_tasks.append(start_task) |  | ||||||
|             except Exception as err:  # pylint: disable=broad-except |             except Exception as err:  # pylint: disable=broad-except | ||||||
|                 _LOGGER.warning("Can't restore Add-on %s: %s", slug, err) |                 _LOGGER.warning("Can't restore Add-on %s: %s", slug, err) | ||||||
|  |                 success = False | ||||||
|  |             else: | ||||||
|  |                 success = success and addon_success | ||||||
|  |                 if start_task: | ||||||
|  |                     start_tasks.append(start_task) | ||||||
|  |  | ||||||
|         return start_tasks |         return (success, start_tasks) | ||||||
|  |  | ||||||
|     async def store_folders(self, folder_list: list[str]): |     async def store_folders(self, folder_list: list[str]): | ||||||
|         """Backup Supervisor data into backup.""" |         """Backup Supervisor data into backup.""" | ||||||
| @@ -475,10 +495,11 @@ class Backup(CoreSysAttributes): | |||||||
|                     f"Can't backup folder {folder}: {str(err)}", _LOGGER.error |                     f"Can't backup folder {folder}: {str(err)}", _LOGGER.error | ||||||
|                 ) from err |                 ) from err | ||||||
|  |  | ||||||
|     async def restore_folders(self, folder_list: list[str]): |     async def restore_folders(self, folder_list: list[str]) -> bool: | ||||||
|         """Backup Supervisor data into backup.""" |         """Backup Supervisor data into backup.""" | ||||||
|  |         success = True | ||||||
|  |  | ||||||
|         async def _folder_restore(name: str) -> None: |         async def _folder_restore(name: str) -> bool: | ||||||
|             """Intenal function to restore a folder.""" |             """Intenal function to restore a folder.""" | ||||||
|             slug_name = name.replace("/", "_") |             slug_name = name.replace("/", "_") | ||||||
|             tar_name = Path( |             tar_name = Path( | ||||||
| @@ -489,7 +510,7 @@ class Backup(CoreSysAttributes): | |||||||
|             # Check if exists inside backup |             # Check if exists inside backup | ||||||
|             if not tar_name.exists(): |             if not tar_name.exists(): | ||||||
|                 _LOGGER.warning("Can't find restore folder %s", name) |                 _LOGGER.warning("Can't find restore folder %s", name) | ||||||
|                 return |                 return False | ||||||
|  |  | ||||||
|             # Unmount any mounts within folder |             # Unmount any mounts within folder | ||||||
|             bind_mounts = [ |             bind_mounts = [ | ||||||
| @@ -508,7 +529,7 @@ class Backup(CoreSysAttributes): | |||||||
|                 await remove_folder(origin_dir, content_only=True) |                 await remove_folder(origin_dir, content_only=True) | ||||||
|  |  | ||||||
|             # Perform a restore |             # Perform a restore | ||||||
|             def _restore() -> None: |             def _restore() -> bool: | ||||||
|                 try: |                 try: | ||||||
|                     _LOGGER.info("Restore folder %s", name) |                     _LOGGER.info("Restore folder %s", name) | ||||||
|                     with SecureTarFile( |                     with SecureTarFile( | ||||||
| @@ -518,13 +539,17 @@ class Backup(CoreSysAttributes): | |||||||
|                         gzip=self.compressed, |                         gzip=self.compressed, | ||||||
|                         bufsize=BUF_SIZE, |                         bufsize=BUF_SIZE, | ||||||
|                     ) as tar_file: |                     ) as tar_file: | ||||||
|                         tar_file.extractall(path=origin_dir, members=tar_file) |                         tar_file.extractall( | ||||||
|  |                             path=origin_dir, members=tar_file, filter="fully_trusted" | ||||||
|  |                         ) | ||||||
|                     _LOGGER.info("Restore folder %s done", name) |                     _LOGGER.info("Restore folder %s done", name) | ||||||
|                 except (tarfile.TarError, OSError) as err: |                 except (tarfile.TarError, OSError) as err: | ||||||
|                     _LOGGER.warning("Can't restore folder %s: %s", name, err) |                     _LOGGER.warning("Can't restore folder %s: %s", name, err) | ||||||
|  |                     return False | ||||||
|  |                 return True | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 await self.sys_run_in_executor(_restore) |                 return await self.sys_run_in_executor(_restore) | ||||||
|             finally: |             finally: | ||||||
|                 if bind_mounts: |                 if bind_mounts: | ||||||
|                     await asyncio.gather( |                     await asyncio.gather( | ||||||
| @@ -535,13 +560,18 @@ class Backup(CoreSysAttributes): | |||||||
|         # avoid issue on slow IO |         # avoid issue on slow IO | ||||||
|         for folder in folder_list: |         for folder in folder_list: | ||||||
|             try: |             try: | ||||||
|                 await _folder_restore(folder) |                 success = success and await _folder_restore(folder) | ||||||
|             except Exception as err:  # pylint: disable=broad-except |             except Exception as err:  # pylint: disable=broad-except | ||||||
|                 _LOGGER.warning("Can't restore folder %s: %s", folder, err) |                 _LOGGER.warning("Can't restore folder %s: %s", folder, err) | ||||||
|  |                 success = False | ||||||
|  |         return success | ||||||
|  |  | ||||||
|     async def store_homeassistant(self): |     async def store_homeassistant(self, exclude_database: bool = False): | ||||||
|         """Backup Home Assitant Core configuration folder.""" |         """Backup Home Assistant Core configuration folder.""" | ||||||
|         self._data[ATTR_HOMEASSISTANT] = {ATTR_VERSION: self.sys_homeassistant.version} |         self._data[ATTR_HOMEASSISTANT] = { | ||||||
|  |             ATTR_VERSION: self.sys_homeassistant.version, | ||||||
|  |             ATTR_EXCLUDE_DATABASE: exclude_database, | ||||||
|  |         } | ||||||
|  |  | ||||||
|         # Backup Home Assistant Core config directory |         # Backup Home Assistant Core config directory | ||||||
|         tar_name = Path( |         tar_name = Path( | ||||||
| @@ -551,13 +581,13 @@ class Backup(CoreSysAttributes): | |||||||
|             tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE |             tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         await self.sys_homeassistant.backup(homeassistant_file) |         await self.sys_homeassistant.backup(homeassistant_file, exclude_database) | ||||||
|  |  | ||||||
|         # Store size |         # Store size | ||||||
|         self.homeassistant[ATTR_SIZE] = homeassistant_file.size |         self.homeassistant[ATTR_SIZE] = homeassistant_file.size | ||||||
|  |  | ||||||
|     async def restore_homeassistant(self) -> Awaitable[None]: |     async def restore_homeassistant(self) -> Awaitable[None]: | ||||||
|         """Restore Home Assitant Core configuration folder.""" |         """Restore Home Assistant Core configuration folder.""" | ||||||
|         await self.sys_homeassistant.core.stop() |         await self.sys_homeassistant.core.stop() | ||||||
|  |  | ||||||
|         # Restore Home Assistant Core config directory |         # Restore Home Assistant Core config directory | ||||||
| @@ -568,7 +598,9 @@ class Backup(CoreSysAttributes): | |||||||
|             tar_name, "r", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE |             tar_name, "r", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         await self.sys_homeassistant.restore(homeassistant_file) |         await self.sys_homeassistant.restore( | ||||||
|  |             homeassistant_file, self.homeassistant_exclude_database | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         # Generate restore task |         # Generate restore task | ||||||
|         async def _core_update(): |         async def _core_update(): | ||||||
| @@ -591,12 +623,12 @@ class Backup(CoreSysAttributes): | |||||||
|         """Store repository list into backup.""" |         """Store repository list into backup.""" | ||||||
|         self.repositories = self.sys_store.repository_urls |         self.repositories = self.sys_store.repository_urls | ||||||
|  |  | ||||||
|     async def restore_repositories(self, replace: bool = False): |     def restore_repositories(self, replace: bool = False) -> Awaitable[None]: | ||||||
|         """Restore repositories from backup. |         """Restore repositories from backup. | ||||||
|  |  | ||||||
|         Return a coroutine. |         Return a coroutine. | ||||||
|         """ |         """ | ||||||
|         await self.sys_store.update_repositories( |         return self.sys_store.update_repositories( | ||||||
|             self.repositories, add_with_errors=True, replace=replace |             self.repositories, add_with_errors=True, replace=replace | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -3,6 +3,7 @@ from __future__ import annotations | |||||||
|  |  | ||||||
| import asyncio | import asyncio | ||||||
| from collections.abc import Awaitable, Iterable | from collections.abc import Awaitable, Iterable | ||||||
|  | import errno | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
|  |  | ||||||
| @@ -14,11 +15,12 @@ from ..const import ( | |||||||
|     CoreState, |     CoreState, | ||||||
| ) | ) | ||||||
| from ..dbus.const import UnitActiveState | from ..dbus.const import UnitActiveState | ||||||
| from ..exceptions import AddonsError, BackupError, BackupJobError | from ..exceptions import AddonsError, BackupError, BackupInvalidError, BackupJobError | ||||||
| from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit | from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
| from ..jobs.job_group import JobGroup | from ..jobs.job_group import JobGroup | ||||||
| from ..mounts.mount import Mount | from ..mounts.mount import Mount | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
| from ..utils.common import FileConfiguration | from ..utils.common import FileConfiguration | ||||||
| from ..utils.dt import utcnow | from ..utils.dt import utcnow | ||||||
| from ..utils.sentinel import DEFAULT | from ..utils.sentinel import DEFAULT | ||||||
| @@ -31,18 +33,6 @@ from .validate import ALL_FOLDERS, SCHEMA_BACKUPS_CONFIG | |||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  |  | ||||||
| def _list_backup_files(path: Path) -> Iterable[Path]: |  | ||||||
|     """Return iterable of backup files, suppress and log OSError for network mounts.""" |  | ||||||
|     try: |  | ||||||
|         # is_dir does a stat syscall which raises if the mount is down |  | ||||||
|         if path.is_dir(): |  | ||||||
|             return path.glob("*.tar") |  | ||||||
|     except OSError as err: |  | ||||||
|         _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err) |  | ||||||
|  |  | ||||||
|     return [] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BackupManager(FileConfiguration, JobGroup): | class BackupManager(FileConfiguration, JobGroup): | ||||||
|     """Manage backups.""" |     """Manage backups.""" | ||||||
|  |  | ||||||
| @@ -119,6 +109,19 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         ) |         ) | ||||||
|         self.sys_jobs.current.stage = stage |         self.sys_jobs.current.stage = stage | ||||||
|  |  | ||||||
|  |     def _list_backup_files(self, path: Path) -> Iterable[Path]: | ||||||
|  |         """Return iterable of backup files, suppress and log OSError for network mounts.""" | ||||||
|  |         try: | ||||||
|  |             # is_dir does a stat syscall which raises if the mount is down | ||||||
|  |             if path.is_dir(): | ||||||
|  |                 return path.glob("*.tar") | ||||||
|  |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG and path == self.sys_config.path_backup: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|  |             _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err) | ||||||
|  |  | ||||||
|  |         return [] | ||||||
|  |  | ||||||
|     def _create_backup( |     def _create_backup( | ||||||
|         self, |         self, | ||||||
|         name: str, |         name: str, | ||||||
| @@ -169,7 +172,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         tasks = [ |         tasks = [ | ||||||
|             self.sys_create_task(_load_backup(tar_file)) |             self.sys_create_task(_load_backup(tar_file)) | ||||||
|             for path in self.backup_locations |             for path in self.backup_locations | ||||||
|             for tar_file in _list_backup_files(path) |             for tar_file in self._list_backup_files(path) | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
|         _LOGGER.info("Found %d backup files", len(tasks)) |         _LOGGER.info("Found %d backup files", len(tasks)) | ||||||
| @@ -184,6 +187,11 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|             _LOGGER.info("Removed backup file %s", backup.slug) |             _LOGGER.info("Removed backup file %s", backup.slug) | ||||||
|  |  | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if ( | ||||||
|  |                 err.errno == errno.EBADMSG | ||||||
|  |                 and backup.tarfile.parent == self.sys_config.path_backup | ||||||
|  |             ): | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error("Can't remove backup %s: %s", backup.slug, err) |             _LOGGER.error("Can't remove backup %s: %s", backup.slug, err) | ||||||
|             return False |             return False | ||||||
|  |  | ||||||
| @@ -208,6 +216,8 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|             backup.tarfile.rename(tar_origin) |             backup.tarfile.rename(tar_origin) | ||||||
|  |  | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error("Can't move backup file to storage: %s", err) |             _LOGGER.error("Can't move backup file to storage: %s", err) | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
| @@ -226,6 +236,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         addon_list: list[Addon], |         addon_list: list[Addon], | ||||||
|         folder_list: list[str], |         folder_list: list[str], | ||||||
|         homeassistant: bool, |         homeassistant: bool, | ||||||
|  |         homeassistant_exclude_database: bool | None, | ||||||
|     ) -> Backup | None: |     ) -> Backup | None: | ||||||
|         """Create a backup. |         """Create a backup. | ||||||
|  |  | ||||||
| @@ -245,7 +256,11 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|                 # HomeAssistant Folder is for v1 |                 # HomeAssistant Folder is for v1 | ||||||
|                 if homeassistant: |                 if homeassistant: | ||||||
|                     self._change_stage(BackupJobStage.HOME_ASSISTANT, backup) |                     self._change_stage(BackupJobStage.HOME_ASSISTANT, backup) | ||||||
|                     await backup.store_homeassistant() |                     await backup.store_homeassistant( | ||||||
|  |                         self.sys_homeassistant.backups_exclude_database | ||||||
|  |                         if homeassistant_exclude_database is None | ||||||
|  |                         else homeassistant_exclude_database | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|                 # Backup folders |                 # Backup folders | ||||||
|                 if folder_list: |                 if folder_list: | ||||||
| @@ -272,7 +287,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
|         name="backup_manager_full_backup", |         name="backup_manager_full_backup", | ||||||
|         conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING], |         conditions=[JobCondition.RUNNING], | ||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|         on_condition=BackupJobError, |         on_condition=BackupJobError, | ||||||
|     ) |     ) | ||||||
| @@ -282,15 +297,25 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         password: str | None = None, |         password: str | None = None, | ||||||
|         compressed: bool = True, |         compressed: bool = True, | ||||||
|         location: Mount | type[DEFAULT] | None = DEFAULT, |         location: Mount | type[DEFAULT] | None = DEFAULT, | ||||||
|  |         homeassistant_exclude_database: bool | None = None, | ||||||
|     ) -> Backup | None: |     ) -> Backup | None: | ||||||
|         """Create a full backup.""" |         """Create a full backup.""" | ||||||
|  |         if self._get_base_path(location) == self.sys_config.path_backup: | ||||||
|  |             await Job.check_conditions( | ||||||
|  |                 self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_full" | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         backup = self._create_backup( |         backup = self._create_backup( | ||||||
|             name, BackupType.FULL, password, compressed, location |             name, BackupType.FULL, password, compressed, location | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         _LOGGER.info("Creating new full backup with slug %s", backup.slug) |         _LOGGER.info("Creating new full backup with slug %s", backup.slug) | ||||||
|         backup = await self._do_backup( |         backup = await self._do_backup( | ||||||
|             backup, self.sys_addons.installed, ALL_FOLDERS, True |             backup, | ||||||
|  |             self.sys_addons.installed, | ||||||
|  |             ALL_FOLDERS, | ||||||
|  |             True, | ||||||
|  |             homeassistant_exclude_database, | ||||||
|         ) |         ) | ||||||
|         if backup: |         if backup: | ||||||
|             _LOGGER.info("Creating full backup with slug %s completed", backup.slug) |             _LOGGER.info("Creating full backup with slug %s completed", backup.slug) | ||||||
| @@ -298,7 +323,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
|         name="backup_manager_partial_backup", |         name="backup_manager_partial_backup", | ||||||
|         conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING], |         conditions=[JobCondition.RUNNING], | ||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|         on_condition=BackupJobError, |         on_condition=BackupJobError, | ||||||
|     ) |     ) | ||||||
| @@ -311,8 +336,14 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         homeassistant: bool = False, |         homeassistant: bool = False, | ||||||
|         compressed: bool = True, |         compressed: bool = True, | ||||||
|         location: Mount | type[DEFAULT] | None = DEFAULT, |         location: Mount | type[DEFAULT] | None = DEFAULT, | ||||||
|  |         homeassistant_exclude_database: bool | None = None, | ||||||
|     ) -> Backup | None: |     ) -> Backup | None: | ||||||
|         """Create a partial backup.""" |         """Create a partial backup.""" | ||||||
|  |         if self._get_base_path(location) == self.sys_config.path_backup: | ||||||
|  |             await Job.check_conditions( | ||||||
|  |                 self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_partial" | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         addons = addons or [] |         addons = addons or [] | ||||||
|         folders = folders or [] |         folders = folders or [] | ||||||
|  |  | ||||||
| @@ -337,7 +368,9 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|                 continue |                 continue | ||||||
|             _LOGGER.warning("Add-on %s not found/installed", addon_slug) |             _LOGGER.warning("Add-on %s not found/installed", addon_slug) | ||||||
|  |  | ||||||
|         backup = await self._do_backup(backup, addon_list, folders, homeassistant) |         backup = await self._do_backup( | ||||||
|  |             backup, addon_list, folders, homeassistant, homeassistant_exclude_database | ||||||
|  |         ) | ||||||
|         if backup: |         if backup: | ||||||
|             _LOGGER.info("Creating partial backup with slug %s completed", backup.slug) |             _LOGGER.info("Creating partial backup with slug %s completed", backup.slug) | ||||||
|         return backup |         return backup | ||||||
| @@ -355,6 +388,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         Must be called from an existing restore job. |         Must be called from an existing restore job. | ||||||
|         """ |         """ | ||||||
|         addon_start_tasks: list[Awaitable[None]] | None = None |         addon_start_tasks: list[Awaitable[None]] | None = None | ||||||
|  |         success = True | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             task_hass: asyncio.Task | None = None |             task_hass: asyncio.Task | None = None | ||||||
| @@ -366,7 +400,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|                 # Process folders |                 # Process folders | ||||||
|                 if folder_list: |                 if folder_list: | ||||||
|                     self._change_stage(RestoreJobStage.FOLDERS, backup) |                     self._change_stage(RestoreJobStage.FOLDERS, backup) | ||||||
|                     await backup.restore_folders(folder_list) |                     success = await backup.restore_folders(folder_list) | ||||||
|  |  | ||||||
|                 # Process Home-Assistant |                 # Process Home-Assistant | ||||||
|                 if homeassistant: |                 if homeassistant: | ||||||
| @@ -383,16 +417,20 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|                         # Remove Add-on because it's not a part of the new env |                         # Remove Add-on because it's not a part of the new env | ||||||
|                         # Do it sequential avoid issue on slow IO |                         # Do it sequential avoid issue on slow IO | ||||||
|                         try: |                         try: | ||||||
|                             await addon.uninstall() |                             await self.sys_addons.uninstall(addon.slug) | ||||||
|                         except AddonsError: |                         except AddonsError: | ||||||
|                             _LOGGER.warning("Can't uninstall Add-on %s", addon.slug) |                             _LOGGER.warning("Can't uninstall Add-on %s", addon.slug) | ||||||
|  |                             success = False | ||||||
|  |  | ||||||
|                 if addon_list: |                 if addon_list: | ||||||
|                     self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup) |                     self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup) | ||||||
|                     await backup.restore_repositories(replace) |                     await backup.restore_repositories(replace) | ||||||
|  |  | ||||||
|                     self._change_stage(RestoreJobStage.ADDONS, backup) |                     self._change_stage(RestoreJobStage.ADDONS, backup) | ||||||
|                     addon_start_tasks = await backup.restore_addons(addon_list) |                     restore_success, addon_start_tasks = await backup.restore_addons( | ||||||
|  |                         addon_list | ||||||
|  |                     ) | ||||||
|  |                     success = success and restore_success | ||||||
|  |  | ||||||
|                 # Wait for Home Assistant Core update/downgrade |                 # Wait for Home Assistant Core update/downgrade | ||||||
|                 if task_hass: |                 if task_hass: | ||||||
| @@ -400,18 +438,24 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|                         RestoreJobStage.AWAIT_HOME_ASSISTANT_RESTART, backup |                         RestoreJobStage.AWAIT_HOME_ASSISTANT_RESTART, backup | ||||||
|                     ) |                     ) | ||||||
|                     await task_hass |                     await task_hass | ||||||
|  |         except BackupError: | ||||||
|  |             raise | ||||||
|         except Exception as err:  # pylint: disable=broad-except |         except Exception as err:  # pylint: disable=broad-except | ||||||
|             _LOGGER.exception("Restore %s error", backup.slug) |             _LOGGER.exception("Restore %s error", backup.slug) | ||||||
|             capture_exception(err) |             capture_exception(err) | ||||||
|             return False |             raise BackupError( | ||||||
|  |                 f"Restore {backup.slug} error, check logs for details" | ||||||
|  |             ) from err | ||||||
|         else: |         else: | ||||||
|             if addon_start_tasks: |             if addon_start_tasks: | ||||||
|                 self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup) |                 self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup) | ||||||
|                 # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere |                 # Failure to resume addons post restore is still a restore failure | ||||||
|  |                 if any( | ||||||
|                     await asyncio.gather(*addon_start_tasks, return_exceptions=True) |                     await asyncio.gather(*addon_start_tasks, return_exceptions=True) | ||||||
|  |                 ): | ||||||
|  |                     return False | ||||||
|  |  | ||||||
|             return True |             return success | ||||||
|         finally: |         finally: | ||||||
|             # Leave Home Assistant alone if it wasn't part of the restore |             # Leave Home Assistant alone if it wasn't part of the restore | ||||||
|             if homeassistant: |             if homeassistant: | ||||||
| @@ -446,31 +490,33 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         self.sys_jobs.current.reference = backup.slug |         self.sys_jobs.current.reference = backup.slug | ||||||
|  |  | ||||||
|         if backup.sys_type != BackupType.FULL: |         if backup.sys_type != BackupType.FULL: | ||||||
|             _LOGGER.error("%s is only a partial backup!", backup.slug) |             raise BackupInvalidError( | ||||||
|             return False |                 f"{backup.slug} is only a partial backup!", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         if backup.protected and not backup.set_password(password): |         if backup.protected and not backup.set_password(password): | ||||||
|             _LOGGER.error("Invalid password for backup %s", backup.slug) |             raise BackupInvalidError( | ||||||
|             return False |                 f"Invalid password for backup {backup.slug}", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         if backup.supervisor_version > self.sys_supervisor.version: |         if backup.supervisor_version > self.sys_supervisor.version: | ||||||
|             _LOGGER.error( |             raise BackupInvalidError( | ||||||
|                 "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.", |                 f"Backup was made on supervisor version {backup.supervisor_version}, " | ||||||
|                 backup.supervisor_version, |                 f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.", | ||||||
|                 self.sys_supervisor.version, |                 _LOGGER.error, | ||||||
|             ) |             ) | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Full-Restore %s start", backup.slug) |         _LOGGER.info("Full-Restore %s start", backup.slug) | ||||||
|         self.sys_core.state = CoreState.FREEZE |         self.sys_core.state = CoreState.FREEZE | ||||||
|  |  | ||||||
|  |         try: | ||||||
|             # Stop Home-Assistant / Add-ons |             # Stop Home-Assistant / Add-ons | ||||||
|             await self.sys_core.shutdown() |             await self.sys_core.shutdown() | ||||||
|  |  | ||||||
|             success = await self._do_restore( |             success = await self._do_restore( | ||||||
|                 backup, backup.addon_list, backup.folders, True, True |                 backup, backup.addon_list, backup.folders, True, True | ||||||
|             ) |             ) | ||||||
|  |         finally: | ||||||
|             self.sys_core.state = CoreState.RUNNING |             self.sys_core.state = CoreState.RUNNING | ||||||
|  |  | ||||||
|         if success: |         if success: | ||||||
| @@ -510,28 +556,30 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|             homeassistant = True |             homeassistant = True | ||||||
|  |  | ||||||
|         if backup.protected and not backup.set_password(password): |         if backup.protected and not backup.set_password(password): | ||||||
|             _LOGGER.error("Invalid password for backup %s", backup.slug) |             raise BackupInvalidError( | ||||||
|             return False |                 f"Invalid password for backup {backup.slug}", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         if backup.homeassistant is None and homeassistant: |         if backup.homeassistant is None and homeassistant: | ||||||
|             _LOGGER.error("No Home Assistant Core data inside the backup") |             raise BackupInvalidError( | ||||||
|             return False |                 "No Home Assistant Core data inside the backup", _LOGGER.error | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         if backup.supervisor_version > self.sys_supervisor.version: |         if backup.supervisor_version > self.sys_supervisor.version: | ||||||
|             _LOGGER.error( |             raise BackupInvalidError( | ||||||
|                 "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.", |                 f"Backup was made on supervisor version {backup.supervisor_version}, " | ||||||
|                 backup.supervisor_version, |                 f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.", | ||||||
|                 self.sys_supervisor.version, |                 _LOGGER.error, | ||||||
|             ) |             ) | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         _LOGGER.info("Partial-Restore %s start", backup.slug) |         _LOGGER.info("Partial-Restore %s start", backup.slug) | ||||||
|         self.sys_core.state = CoreState.FREEZE |         self.sys_core.state = CoreState.FREEZE | ||||||
|  |  | ||||||
|  |         try: | ||||||
|             success = await self._do_restore( |             success = await self._do_restore( | ||||||
|                 backup, addon_list, folder_list, homeassistant, False |                 backup, addon_list, folder_list, homeassistant, False | ||||||
|             ) |             ) | ||||||
|  |         finally: | ||||||
|             self.sys_core.state = CoreState.RUNNING |             self.sys_core.state = CoreState.RUNNING | ||||||
|  |  | ||||||
|         if success: |         if success: | ||||||
| @@ -582,7 +630,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|         try: |         try: | ||||||
|             try: |             try: | ||||||
|                 await asyncio.wait_for(self._thaw_event.wait(), timeout) |                 await asyncio.wait_for(self._thaw_event.wait(), timeout) | ||||||
|             except asyncio.TimeoutError: |             except TimeoutError: | ||||||
|                 _LOGGER.warning( |                 _LOGGER.warning( | ||||||
|                     "Timeout waiting for signal to thaw after manual freeze, beginning thaw now" |                     "Timeout waiting for signal to thaw after manual freeze, beginning thaw now" | ||||||
|                 ) |                 ) | ||||||
| @@ -591,7 +639,7 @@ class BackupManager(FileConfiguration, JobGroup): | |||||||
|             await self.sys_homeassistant.end_backup() |             await self.sys_homeassistant.end_backup() | ||||||
|  |  | ||||||
|             self._change_stage(BackupJobStage.ADDONS) |             self._change_stage(BackupJobStage.ADDONS) | ||||||
|             addon_start_tasks: list[Awaitable[None]] = [ |             addon_start_tasks: list[asyncio.Task] = [ | ||||||
|                 task |                 task | ||||||
|                 for task in await asyncio.gather( |                 for task in await asyncio.gather( | ||||||
|                     *[addon.end_backup() for addon in running_addons] |                     *[addon.end_backup() for addon in running_addons] | ||||||
|   | |||||||
| @@ -14,6 +14,7 @@ from ..const import ( | |||||||
|     ATTR_DATE, |     ATTR_DATE, | ||||||
|     ATTR_DAYS_UNTIL_STALE, |     ATTR_DAYS_UNTIL_STALE, | ||||||
|     ATTR_DOCKER, |     ATTR_DOCKER, | ||||||
|  |     ATTR_EXCLUDE_DATABASE, | ||||||
|     ATTR_FOLDERS, |     ATTR_FOLDERS, | ||||||
|     ATTR_HOMEASSISTANT, |     ATTR_HOMEASSISTANT, | ||||||
|     ATTR_NAME, |     ATTR_NAME, | ||||||
| @@ -103,6 +104,9 @@ SCHEMA_BACKUP = vol.Schema( | |||||||
|                     { |                     { | ||||||
|                         vol.Required(ATTR_VERSION): version_tag, |                         vol.Required(ATTR_VERSION): version_tag, | ||||||
|                         vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float), |                         vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float), | ||||||
|  |                         vol.Optional( | ||||||
|  |                             ATTR_EXCLUDE_DATABASE, default=False | ||||||
|  |                         ): vol.Boolean(), | ||||||
|                     }, |                     }, | ||||||
|                     extra=vol.REMOVE_EXTRA, |                     extra=vol.REMOVE_EXTRA, | ||||||
|                 ) |                 ) | ||||||
|   | |||||||
| @@ -6,7 +6,7 @@ import signal | |||||||
|  |  | ||||||
| from colorlog import ColoredFormatter | from colorlog import ColoredFormatter | ||||||
|  |  | ||||||
| from .addons import AddonManager | from .addons.manager import AddonManager | ||||||
| from .api import RestAPI | from .api import RestAPI | ||||||
| from .arch import CpuArch | from .arch import CpuArch | ||||||
| from .auth import Auth | from .auth import Auth | ||||||
| @@ -221,6 +221,14 @@ def initialize_system(coresys: CoreSys) -> None: | |||||||
|         ) |         ) | ||||||
|         config.path_emergency.mkdir() |         config.path_emergency.mkdir() | ||||||
|  |  | ||||||
|  |     # Addon Configs folder | ||||||
|  |     if not config.path_addon_configs.is_dir(): | ||||||
|  |         _LOGGER.debug( | ||||||
|  |             "Creating Supervisor add-on configs folder at '%s'", | ||||||
|  |             config.path_addon_configs, | ||||||
|  |         ) | ||||||
|  |         config.path_addon_configs.mkdir() | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_system_env(coresys: CoreSys) -> None: | def migrate_system_env(coresys: CoreSys) -> None: | ||||||
|     """Cleanup some stuff after update.""" |     """Cleanup some stuff after update.""" | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| """Bootstrap Supervisor.""" | """Bootstrap Supervisor.""" | ||||||
| from datetime import datetime | from datetime import UTC, datetime | ||||||
| import logging | import logging | ||||||
| import os | import os | ||||||
| from pathlib import Path, PurePath | from pathlib import Path, PurePath | ||||||
| @@ -48,8 +48,9 @@ MEDIA_DATA = PurePath("media") | |||||||
| MOUNTS_FOLDER = PurePath("mounts") | MOUNTS_FOLDER = PurePath("mounts") | ||||||
| MOUNTS_CREDENTIALS = PurePath(".mounts_credentials") | MOUNTS_CREDENTIALS = PurePath(".mounts_credentials") | ||||||
| EMERGENCY_DATA = PurePath("emergency") | EMERGENCY_DATA = PurePath("emergency") | ||||||
|  | ADDON_CONFIGS = PurePath("addon_configs") | ||||||
|  |  | ||||||
| DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat() | DEFAULT_BOOT_TIME = datetime.fromtimestamp(0, UTC).isoformat() | ||||||
|  |  | ||||||
| # We filter out UTC because it's the system default fallback | # We filter out UTC because it's the system default fallback | ||||||
| # Core also not respect the cotnainer timezone and reset timezones | # Core also not respect the cotnainer timezone and reset timezones | ||||||
| @@ -163,7 +164,7 @@ class CoreConfig(FileConfiguration): | |||||||
|  |  | ||||||
|         boot_time = parse_datetime(boot_str) |         boot_time = parse_datetime(boot_str) | ||||||
|         if not boot_time: |         if not boot_time: | ||||||
|             return datetime.utcfromtimestamp(1) |             return datetime.fromtimestamp(1, UTC) | ||||||
|         return boot_time |         return boot_time | ||||||
|  |  | ||||||
|     @last_boot.setter |     @last_boot.setter | ||||||
| @@ -231,6 +232,16 @@ class CoreConfig(FileConfiguration): | |||||||
|         """Return root add-on data folder external for Docker.""" |         """Return root add-on data folder external for Docker.""" | ||||||
|         return PurePath(self.path_extern_supervisor, ADDONS_DATA) |         return PurePath(self.path_extern_supervisor, ADDONS_DATA) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_addon_configs(self) -> Path: | ||||||
|  |         """Return root Add-on configs folder.""" | ||||||
|  |         return self.path_supervisor / ADDON_CONFIGS | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def path_extern_addon_configs(self) -> PurePath: | ||||||
|  |         """Return root Add-on configs folder external for Docker.""" | ||||||
|  |         return PurePath(self.path_extern_supervisor, ADDON_CONFIGS) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def path_audio(self) -> Path: |     def path_audio(self) -> Path: | ||||||
|         """Return root audio data folder.""" |         """Return root audio data folder.""" | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ from enum import StrEnum | |||||||
| from ipaddress import ip_network | from ipaddress import ip_network | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from sys import version_info as systemversion | from sys import version_info as systemversion | ||||||
|  | from typing import Self | ||||||
|  |  | ||||||
| from aiohttp import __version__ as aiohttpversion | from aiohttp import __version__ as aiohttpversion | ||||||
|  |  | ||||||
| @@ -19,6 +20,7 @@ SUPERVISOR_DATA = Path("/data") | |||||||
| FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json") | FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json") | ||||||
| FILE_HASSIO_AUTH = Path(SUPERVISOR_DATA, "auth.json") | FILE_HASSIO_AUTH = Path(SUPERVISOR_DATA, "auth.json") | ||||||
| FILE_HASSIO_BACKUPS = Path(SUPERVISOR_DATA, "backups.json") | FILE_HASSIO_BACKUPS = Path(SUPERVISOR_DATA, "backups.json") | ||||||
|  | FILE_HASSIO_BOARD = Path(SUPERVISOR_DATA, "board.json") | ||||||
| FILE_HASSIO_CONFIG = Path(SUPERVISOR_DATA, "config.json") | FILE_HASSIO_CONFIG = Path(SUPERVISOR_DATA, "config.json") | ||||||
| FILE_HASSIO_DISCOVERY = Path(SUPERVISOR_DATA, "discovery.json") | FILE_HASSIO_DISCOVERY = Path(SUPERVISOR_DATA, "discovery.json") | ||||||
| FILE_HASSIO_DOCKER = Path(SUPERVISOR_DATA, "docker.json") | FILE_HASSIO_DOCKER = Path(SUPERVISOR_DATA, "docker.json") | ||||||
| @@ -88,6 +90,7 @@ REQUEST_FROM = "HASSIO_FROM" | |||||||
| ATTR_ACCESS_TOKEN = "access_token" | ATTR_ACCESS_TOKEN = "access_token" | ||||||
| ATTR_ACCESSPOINTS = "accesspoints" | ATTR_ACCESSPOINTS = "accesspoints" | ||||||
| ATTR_ACTIVE = "active" | ATTR_ACTIVE = "active" | ||||||
|  | ATTR_ACTIVITY_LED = "activity_led" | ||||||
| ATTR_ADDON = "addon" | ATTR_ADDON = "addon" | ||||||
| ATTR_ADDONS = "addons" | ATTR_ADDONS = "addons" | ||||||
| ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list" | ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list" | ||||||
| @@ -113,6 +116,7 @@ ATTR_BACKUP_EXCLUDE = "backup_exclude" | |||||||
| ATTR_BACKUP_POST = "backup_post" | ATTR_BACKUP_POST = "backup_post" | ||||||
| ATTR_BACKUP_PRE = "backup_pre" | ATTR_BACKUP_PRE = "backup_pre" | ||||||
| ATTR_BACKUPS = "backups" | ATTR_BACKUPS = "backups" | ||||||
|  | ATTR_BACKUPS_EXCLUDE_DATABASE = "backups_exclude_database" | ||||||
| ATTR_BLK_READ = "blk_read" | ATTR_BLK_READ = "blk_read" | ||||||
| ATTR_BLK_WRITE = "blk_write" | ATTR_BLK_WRITE = "blk_write" | ||||||
| ATTR_BOARD = "board" | ATTR_BOARD = "board" | ||||||
| @@ -152,9 +156,11 @@ ATTR_DIAGNOSTICS = "diagnostics" | |||||||
| ATTR_DISCOVERY = "discovery" | ATTR_DISCOVERY = "discovery" | ||||||
| ATTR_DISK = "disk" | ATTR_DISK = "disk" | ||||||
| ATTR_DISK_FREE = "disk_free" | ATTR_DISK_FREE = "disk_free" | ||||||
|  | ATTR_DISK_LED = "disk_led" | ||||||
| ATTR_DISK_LIFE_TIME = "disk_life_time" | ATTR_DISK_LIFE_TIME = "disk_life_time" | ||||||
| ATTR_DISK_TOTAL = "disk_total" | ATTR_DISK_TOTAL = "disk_total" | ||||||
| ATTR_DISK_USED = "disk_used" | ATTR_DISK_USED = "disk_used" | ||||||
|  | ATTR_DISPLAYNAME = "displayname" | ||||||
| ATTR_DNS = "dns" | ATTR_DNS = "dns" | ||||||
| ATTR_DOCKER = "docker" | ATTR_DOCKER = "docker" | ||||||
| ATTR_DOCKER_API = "docker_api" | ATTR_DOCKER_API = "docker_api" | ||||||
| @@ -164,6 +170,7 @@ ATTR_ENABLE = "enable" | |||||||
| ATTR_ENABLED = "enabled" | ATTR_ENABLED = "enabled" | ||||||
| ATTR_ENVIRONMENT = "environment" | ATTR_ENVIRONMENT = "environment" | ||||||
| ATTR_EVENT = "event" | ATTR_EVENT = "event" | ||||||
|  | ATTR_EXCLUDE_DATABASE = "exclude_database" | ||||||
| ATTR_FEATURES = "features" | ATTR_FEATURES = "features" | ||||||
| ATTR_FILENAME = "filename" | ATTR_FILENAME = "filename" | ||||||
| ATTR_FLAGS = "flags" | ATTR_FLAGS = "flags" | ||||||
| @@ -177,7 +184,9 @@ ATTR_HASSIO_API = "hassio_api" | |||||||
| ATTR_HASSIO_ROLE = "hassio_role" | ATTR_HASSIO_ROLE = "hassio_role" | ||||||
| ATTR_HASSOS = "hassos" | ATTR_HASSOS = "hassos" | ||||||
| ATTR_HEALTHY = "healthy" | ATTR_HEALTHY = "healthy" | ||||||
|  | ATTR_HEARTBEAT_LED = "heartbeat_led" | ||||||
| ATTR_HOMEASSISTANT = "homeassistant" | ATTR_HOMEASSISTANT = "homeassistant" | ||||||
|  | ATTR_HOMEASSISTANT_EXCLUDE_DATABASE = "homeassistant_exclude_database" | ||||||
| ATTR_HOMEASSISTANT_API = "homeassistant_api" | ATTR_HOMEASSISTANT_API = "homeassistant_api" | ||||||
| ATTR_HOST = "host" | ATTR_HOST = "host" | ||||||
| ATTR_HOST_DBUS = "host_dbus" | ATTR_HOST_DBUS = "host_dbus" | ||||||
| @@ -252,6 +261,7 @@ ATTR_PLUGINS = "plugins" | |||||||
| ATTR_PORT = "port" | ATTR_PORT = "port" | ||||||
| ATTR_PORTS = "ports" | ATTR_PORTS = "ports" | ||||||
| ATTR_PORTS_DESCRIPTION = "ports_description" | ATTR_PORTS_DESCRIPTION = "ports_description" | ||||||
|  | ATTR_POWER_LED = "power_led" | ||||||
| ATTR_PREFIX = "prefix" | ATTR_PREFIX = "prefix" | ||||||
| ATTR_PRIMARY = "primary" | ATTR_PRIMARY = "primary" | ||||||
| ATTR_PRIORITY = "priority" | ATTR_PRIORITY = "priority" | ||||||
| @@ -315,6 +325,7 @@ ATTR_UPDATE_KEY = "update_key" | |||||||
| ATTR_URL = "url" | ATTR_URL = "url" | ||||||
| ATTR_USB = "usb" | ATTR_USB = "usb" | ||||||
| ATTR_USER = "user" | ATTR_USER = "user" | ||||||
|  | ATTR_USER_LED = "user_led" | ||||||
| ATTR_USERNAME = "username" | ATTR_USERNAME = "username" | ||||||
| ATTR_UUID = "uuid" | ATTR_UUID = "uuid" | ||||||
| ATTR_VALID = "valid" | ATTR_VALID = "valid" | ||||||
| @@ -334,14 +345,6 @@ PROVIDE_SERVICE = "provide" | |||||||
| NEED_SERVICE = "need" | NEED_SERVICE = "need" | ||||||
| WANT_SERVICE = "want" | WANT_SERVICE = "want" | ||||||
|  |  | ||||||
|  |  | ||||||
| MAP_CONFIG = "config" |  | ||||||
| MAP_SSL = "ssl" |  | ||||||
| MAP_ADDONS = "addons" |  | ||||||
| MAP_BACKUP = "backup" |  | ||||||
| MAP_SHARE = "share" |  | ||||||
| MAP_MEDIA = "media" |  | ||||||
|  |  | ||||||
| ARCH_ARMHF = "armhf" | ARCH_ARMHF = "armhf" | ||||||
| ARCH_ARMV7 = "armv7" | ARCH_ARMV7 = "armv7" | ||||||
| ARCH_AARCH64 = "aarch64" | ARCH_AARCH64 = "aarch64" | ||||||
| @@ -479,6 +482,23 @@ class IngressSessionDataUser: | |||||||
|     display_name: str | None = None |     display_name: str | None = None | ||||||
|     username: str | None = None |     username: str | None = None | ||||||
|  |  | ||||||
|  |     def to_dict(self) -> dict[str, str | None]: | ||||||
|  |         """Get dictionary representation.""" | ||||||
|  |         return { | ||||||
|  |             ATTR_ID: self.id, | ||||||
|  |             ATTR_DISPLAYNAME: self.display_name, | ||||||
|  |             ATTR_USERNAME: self.username, | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def from_dict(cls, data: dict[str, str | None]) -> Self: | ||||||
|  |         """Return object from dictionary representation.""" | ||||||
|  |         return cls( | ||||||
|  |             id=data[ATTR_ID], | ||||||
|  |             display_name=data.get(ATTR_DISPLAYNAME), | ||||||
|  |             username=data.get(ATTR_USERNAME), | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | @dataclass | ||||||
| class IngressSessionData: | class IngressSessionData: | ||||||
| @@ -486,6 +506,15 @@ class IngressSessionData: | |||||||
|  |  | ||||||
|     user: IngressSessionDataUser |     user: IngressSessionDataUser | ||||||
|  |  | ||||||
|  |     def to_dict(self) -> dict[str, dict[str, str | None]]: | ||||||
|  |         """Get dictionary representation.""" | ||||||
|  |         return {ATTR_USER: self.user.to_dict()} | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def from_dict(cls, data: dict[str, dict[str, str | None]]) -> Self: | ||||||
|  |         """Return object from dictionary representation.""" | ||||||
|  |         return cls(user=IngressSessionDataUser.from_dict(data[ATTR_USER])) | ||||||
|  |  | ||||||
|  |  | ||||||
| STARTING_STATES = [ | STARTING_STATES = [ | ||||||
|     CoreState.INITIALIZE, |     CoreState.INITIALIZE, | ||||||
|   | |||||||
| @@ -28,7 +28,7 @@ from .homeassistant.core import LANDINGPAGE | |||||||
| from .resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason | from .resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason | ||||||
| from .utils.dt import utcnow | from .utils.dt import utcnow | ||||||
| from .utils.sentry import capture_exception | from .utils.sentry import capture_exception | ||||||
| from .utils.whoami import retrieve_whoami | from .utils.whoami import WhoamiData, retrieve_whoami | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| @@ -250,7 +250,7 @@ class Core(CoreSysAttributes): | |||||||
|                 except HomeAssistantError as err: |                 except HomeAssistantError as err: | ||||||
|                     capture_exception(err) |                     capture_exception(err) | ||||||
|             else: |             else: | ||||||
|                 _LOGGER.info("Skiping start of Home Assistant") |                 _LOGGER.info("Skipping start of Home Assistant") | ||||||
|  |  | ||||||
|             # Core is not running |             # Core is not running | ||||||
|             if self.sys_homeassistant.core.error_state: |             if self.sys_homeassistant.core.error_state: | ||||||
| @@ -309,7 +309,7 @@ class Core(CoreSysAttributes): | |||||||
|                         ) |                         ) | ||||||
|                     ] |                     ] | ||||||
|                 ) |                 ) | ||||||
|         except asyncio.TimeoutError: |         except TimeoutError: | ||||||
|             _LOGGER.warning("Stage 1: Force Shutdown!") |             _LOGGER.warning("Stage 1: Force Shutdown!") | ||||||
|  |  | ||||||
|         # Stage 2 |         # Stage 2 | ||||||
| @@ -326,7 +326,7 @@ class Core(CoreSysAttributes): | |||||||
|                         ) |                         ) | ||||||
|                     ] |                     ] | ||||||
|                 ) |                 ) | ||||||
|         except asyncio.TimeoutError: |         except TimeoutError: | ||||||
|             _LOGGER.warning("Stage 2: Force Shutdown!") |             _LOGGER.warning("Stage 2: Force Shutdown!") | ||||||
|  |  | ||||||
|         self.state = CoreState.CLOSE |         self.state = CoreState.CLOSE | ||||||
| @@ -363,6 +363,13 @@ class Core(CoreSysAttributes): | |||||||
|         self.sys_config.last_boot = self.sys_hardware.helper.last_boot |         self.sys_config.last_boot = self.sys_hardware.helper.last_boot | ||||||
|         self.sys_config.save_data() |         self.sys_config.save_data() | ||||||
|  |  | ||||||
|  |     async def _retrieve_whoami(self, with_ssl: bool) -> WhoamiData | None: | ||||||
|  |         try: | ||||||
|  |             return await retrieve_whoami(self.sys_websession, with_ssl) | ||||||
|  |         except WhoamiSSLError: | ||||||
|  |             _LOGGER.info("Whoami service SSL error") | ||||||
|  |             return None | ||||||
|  |  | ||||||
|     async def _adjust_system_datetime(self): |     async def _adjust_system_datetime(self): | ||||||
|         """Adjust system time/date on startup.""" |         """Adjust system time/date on startup.""" | ||||||
|         # If no timezone is detect or set |         # If no timezone is detect or set | ||||||
| @@ -375,19 +382,13 @@ class Core(CoreSysAttributes): | |||||||
|  |  | ||||||
|         # Get Timezone data |         # Get Timezone data | ||||||
|         try: |         try: | ||||||
|             data = await retrieve_whoami(self.sys_websession) |             data = await self._retrieve_whoami(True) | ||||||
|         except WhoamiSSLError: |  | ||||||
|             pass |  | ||||||
|         except WhoamiError as err: |  | ||||||
|             _LOGGER.warning("Can't adjust Time/Date settings: %s", err) |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|             # SSL Date Issue & possible time drift |             # SSL Date Issue & possible time drift | ||||||
|             if not data: |             if not data: | ||||||
|             try: |                 data = await self._retrieve_whoami(False) | ||||||
|                 data = await retrieve_whoami(self.sys_websession, with_ssl=False) |  | ||||||
|         except WhoamiError as err: |         except WhoamiError as err: | ||||||
|                 _LOGGER.error("Can't adjust Time/Date settings: %s", err) |             _LOGGER.warning("Can't adjust Time/Date settings: %s", err) | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         self.sys_config.timezone = self.sys_config.timezone or data.timezone |         self.sys_config.timezone = self.sys_config.timezone or data.timezone | ||||||
|   | |||||||
| @@ -18,7 +18,7 @@ from .const import ENV_SUPERVISOR_DEV, SERVER_SOFTWARE | |||||||
| from .utils.dt import UTC, get_time_zone | from .utils.dt import UTC, get_time_zone | ||||||
|  |  | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from .addons import AddonManager |     from .addons.manager import AddonManager | ||||||
|     from .api import RestAPI |     from .api import RestAPI | ||||||
|     from .arch import CpuArch |     from .arch import CpuArch | ||||||
|     from .auth import Auth |     from .auth import Auth | ||||||
| @@ -725,7 +725,7 @@ class CoreSysAttributes: | |||||||
|     def sys_run_in_executor( |     def sys_run_in_executor( | ||||||
|         self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any] |         self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any] | ||||||
|     ) -> Coroutine[Any, Any, T]: |     ) -> Coroutine[Any, Any, T]: | ||||||
|         """Add an job to the executor pool.""" |         """Add a job to the executor pool.""" | ||||||
|         return self.coresys.run_in_executor(funct, *args, **kwargs) |         return self.coresys.run_in_executor(funct, *args, **kwargs) | ||||||
|  |  | ||||||
|     def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task: |     def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task: | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ | |||||||
|   "raspberrypi3-64": ["aarch64", "armv7", "armhf"], |   "raspberrypi3-64": ["aarch64", "armv7", "armhf"], | ||||||
|   "raspberrypi4": ["armv7", "armhf"], |   "raspberrypi4": ["armv7", "armhf"], | ||||||
|   "raspberrypi4-64": ["aarch64", "armv7", "armhf"], |   "raspberrypi4-64": ["aarch64", "armv7", "armhf"], | ||||||
|  |   "raspberrypi5-64": ["aarch64", "armv7", "armhf"], | ||||||
|   "yellow": ["aarch64", "armv7", "armhf"], |   "yellow": ["aarch64", "armv7", "armhf"], | ||||||
|   "green": ["aarch64", "armv7", "armhf"], |   "green": ["aarch64", "armv7", "armhf"], | ||||||
|   "tinker": ["armv7", "armhf"], |   "tinker": ["armv7", "armhf"], | ||||||
|   | |||||||
| @@ -6,7 +6,7 @@ from typing import Any | |||||||
| from awesomeversion import AwesomeVersion | from awesomeversion import AwesomeVersion | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ...exceptions import DBusError, DBusInterfaceError | from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     DBUS_ATTR_DIAGNOSTICS, |     DBUS_ATTR_DIAGNOSTICS, | ||||||
|     DBUS_ATTR_VERSION, |     DBUS_ATTR_VERSION, | ||||||
| @@ -99,7 +99,7 @@ class OSAgent(DBusInterfaceProxy): | |||||||
|             await asyncio.gather(*[dbus.connect(bus) for dbus in self.all]) |             await asyncio.gather(*[dbus.connect(bus) for dbus in self.all]) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to OS-Agent") |             _LOGGER.warning("Can't connect to OS-Agent") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No OS-Agent support on the host. Some Host functions have been disabled." |                 "No OS-Agent support on the host. Some Host functions have been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -11,7 +11,8 @@ from ...const import ( | |||||||
|     DBUS_OBJECT_HAOS_BOARDS, |     DBUS_OBJECT_HAOS_BOARDS, | ||||||
| ) | ) | ||||||
| from ...interface import DBusInterfaceProxy, dbus_property | from ...interface import DBusInterfaceProxy, dbus_property | ||||||
| from .const import BOARD_NAME_SUPERVISED, BOARD_NAME_YELLOW | from .const import BOARD_NAME_GREEN, BOARD_NAME_SUPERVISED, BOARD_NAME_YELLOW | ||||||
|  | from .green import Green | ||||||
| from .interface import BoardProxy | from .interface import BoardProxy | ||||||
| from .supervised import Supervised | from .supervised import Supervised | ||||||
| from .yellow import Yellow | from .yellow import Yellow | ||||||
| @@ -39,6 +40,14 @@ class BoardManager(DBusInterfaceProxy): | |||||||
|         """Get board name.""" |         """Get board name.""" | ||||||
|         return self.properties[DBUS_ATTR_BOARD] |         return self.properties[DBUS_ATTR_BOARD] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def green(self) -> Green: | ||||||
|  |         """Get Green board.""" | ||||||
|  |         if self.board != BOARD_NAME_GREEN: | ||||||
|  |             raise BoardInvalidError("Green board is not in use", _LOGGER.error) | ||||||
|  |  | ||||||
|  |         return self._board_proxy | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def supervised(self) -> Supervised: |     def supervised(self) -> Supervised: | ||||||
|         """Get Supervised board.""" |         """Get Supervised board.""" | ||||||
| @@ -61,6 +70,8 @@ class BoardManager(DBusInterfaceProxy): | |||||||
|  |  | ||||||
|         if self.board == BOARD_NAME_YELLOW: |         if self.board == BOARD_NAME_YELLOW: | ||||||
|             self._board_proxy = Yellow() |             self._board_proxy = Yellow() | ||||||
|  |         elif self.board == BOARD_NAME_GREEN: | ||||||
|  |             self._board_proxy = Green() | ||||||
|         elif self.board == BOARD_NAME_SUPERVISED: |         elif self.board == BOARD_NAME_SUPERVISED: | ||||||
|             self._board_proxy = Supervised() |             self._board_proxy = Supervised() | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,4 +1,5 @@ | |||||||
| """Constants for boards.""" | """Constants for boards.""" | ||||||
|  |  | ||||||
|  | BOARD_NAME_GREEN = "Green" | ||||||
| BOARD_NAME_SUPERVISED = "Supervised" | BOARD_NAME_SUPERVISED = "Supervised" | ||||||
| BOARD_NAME_YELLOW = "Yellow" | BOARD_NAME_YELLOW = "Yellow" | ||||||
|   | |||||||
							
								
								
									
										65
									
								
								supervisor/dbus/agent/boards/green.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										65
									
								
								supervisor/dbus/agent/boards/green.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,65 @@ | |||||||
|  | """Green board management.""" | ||||||
|  |  | ||||||
|  | import asyncio | ||||||
|  |  | ||||||
|  | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
|  | from ....const import ATTR_ACTIVITY_LED, ATTR_POWER_LED, ATTR_USER_LED | ||||||
|  | from ...const import DBUS_ATTR_ACTIVITY_LED, DBUS_ATTR_POWER_LED, DBUS_ATTR_USER_LED | ||||||
|  | from ...interface import dbus_property | ||||||
|  | from .const import BOARD_NAME_GREEN | ||||||
|  | from .interface import BoardProxy | ||||||
|  | from .validate import SCHEMA_GREEN_BOARD | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Green(BoardProxy): | ||||||
|  |     """Green board manager object.""" | ||||||
|  |  | ||||||
|  |     def __init__(self) -> None: | ||||||
|  |         """Initialize properties.""" | ||||||
|  |         super().__init__(BOARD_NAME_GREEN, SCHEMA_GREEN_BOARD) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     @dbus_property | ||||||
|  |     def activity_led(self) -> bool: | ||||||
|  |         """Get activity LED enabled.""" | ||||||
|  |         return self.properties[DBUS_ATTR_ACTIVITY_LED] | ||||||
|  |  | ||||||
|  |     @activity_led.setter | ||||||
|  |     def activity_led(self, enabled: bool) -> None: | ||||||
|  |         """Enable/disable activity LED.""" | ||||||
|  |         self._data[ATTR_ACTIVITY_LED] = enabled | ||||||
|  |         asyncio.create_task(self.dbus.Boards.Green.set_activity_led(enabled)) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     @dbus_property | ||||||
|  |     def power_led(self) -> bool: | ||||||
|  |         """Get power LED enabled.""" | ||||||
|  |         return self.properties[DBUS_ATTR_POWER_LED] | ||||||
|  |  | ||||||
|  |     @power_led.setter | ||||||
|  |     def power_led(self, enabled: bool) -> None: | ||||||
|  |         """Enable/disable power LED.""" | ||||||
|  |         self._data[ATTR_POWER_LED] = enabled | ||||||
|  |         asyncio.create_task(self.dbus.Boards.Green.set_power_led(enabled)) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     @dbus_property | ||||||
|  |     def user_led(self) -> bool: | ||||||
|  |         """Get user LED enabled.""" | ||||||
|  |         return self.properties[DBUS_ATTR_USER_LED] | ||||||
|  |  | ||||||
|  |     @user_led.setter | ||||||
|  |     def user_led(self, enabled: bool) -> None: | ||||||
|  |         """Enable/disable disk LED.""" | ||||||
|  |         self._data[ATTR_USER_LED] = enabled | ||||||
|  |         asyncio.create_task(self.dbus.Boards.Green.set_user_led(enabled)) | ||||||
|  |  | ||||||
|  |     async def connect(self, bus: MessageBus) -> None: | ||||||
|  |         """Connect to D-Bus.""" | ||||||
|  |         await super().connect(bus) | ||||||
|  |  | ||||||
|  |         # Set LEDs based on settings on connect | ||||||
|  |         self.activity_led = self._data[ATTR_ACTIVITY_LED] | ||||||
|  |         self.power_led = self._data[ATTR_POWER_LED] | ||||||
|  |         self.user_led = self._data[ATTR_USER_LED] | ||||||
| @@ -1,17 +1,23 @@ | |||||||
| """Board dbus proxy interface.""" | """Board dbus proxy interface.""" | ||||||
|  |  | ||||||
|  | from voluptuous import Schema | ||||||
|  |  | ||||||
|  | from ....const import FILE_HASSIO_BOARD | ||||||
|  | from ....utils.common import FileConfiguration | ||||||
| from ...const import DBUS_IFACE_HAOS_BOARDS, DBUS_NAME_HAOS, DBUS_OBJECT_HAOS_BOARDS | from ...const import DBUS_IFACE_HAOS_BOARDS, DBUS_NAME_HAOS, DBUS_OBJECT_HAOS_BOARDS | ||||||
| from ...interface import DBusInterfaceProxy | from ...interface import DBusInterfaceProxy | ||||||
|  | from .validate import SCHEMA_BASE_BOARD | ||||||
|  |  | ||||||
|  |  | ||||||
| class BoardProxy(DBusInterfaceProxy): | class BoardProxy(FileConfiguration, DBusInterfaceProxy): | ||||||
|     """DBus interface proxy for os board.""" |     """DBus interface proxy for os board.""" | ||||||
|  |  | ||||||
|     bus_name: str = DBUS_NAME_HAOS |     bus_name: str = DBUS_NAME_HAOS | ||||||
|  |  | ||||||
|     def __init__(self, name: str) -> None: |     def __init__(self, name: str, file_schema: Schema | None = None) -> None: | ||||||
|         """Initialize properties.""" |         """Initialize properties.""" | ||||||
|         super().__init__() |         super().__init__(FILE_HASSIO_BOARD, file_schema or SCHEMA_BASE_BOARD) | ||||||
|  |         super(FileConfiguration, self).__init__() | ||||||
|  |  | ||||||
|         self._name: str = name |         self._name: str = name | ||||||
|         self.object_path: str = f"{DBUS_OBJECT_HAOS_BOARDS}/{name}" |         self.object_path: str = f"{DBUS_OBJECT_HAOS_BOARDS}/{name}" | ||||||
|   | |||||||
							
								
								
									
										32
									
								
								supervisor/dbus/agent/boards/validate.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								supervisor/dbus/agent/boards/validate.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | |||||||
|  | """Validation for board config.""" | ||||||
|  |  | ||||||
|  | import voluptuous as vol | ||||||
|  |  | ||||||
|  | from ....const import ( | ||||||
|  |     ATTR_ACTIVITY_LED, | ||||||
|  |     ATTR_DISK_LED, | ||||||
|  |     ATTR_HEARTBEAT_LED, | ||||||
|  |     ATTR_POWER_LED, | ||||||
|  |     ATTR_USER_LED, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | # pylint: disable=no-value-for-parameter | ||||||
|  | SCHEMA_BASE_BOARD = vol.Schema({}, extra=vol.REMOVE_EXTRA) | ||||||
|  |  | ||||||
|  | SCHEMA_GREEN_BOARD = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Optional(ATTR_ACTIVITY_LED, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_POWER_LED, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_USER_LED, default=True): vol.Boolean(), | ||||||
|  |     }, | ||||||
|  |     extra=vol.REMOVE_EXTRA, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | SCHEMA_YELLOW_BOARD = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Optional(ATTR_DISK_LED, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_HEARTBEAT_LED, default=True): vol.Boolean(), | ||||||
|  |         vol.Optional(ATTR_POWER_LED, default=True): vol.Boolean(), | ||||||
|  |     }, | ||||||
|  |     extra=vol.REMOVE_EXTRA, | ||||||
|  | ) | ||||||
| @@ -2,10 +2,14 @@ | |||||||
|  |  | ||||||
| import asyncio | import asyncio | ||||||
|  |  | ||||||
|  | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
|  | from ....const import ATTR_DISK_LED, ATTR_HEARTBEAT_LED, ATTR_POWER_LED | ||||||
| from ...const import DBUS_ATTR_DISK_LED, DBUS_ATTR_HEARTBEAT_LED, DBUS_ATTR_POWER_LED | from ...const import DBUS_ATTR_DISK_LED, DBUS_ATTR_HEARTBEAT_LED, DBUS_ATTR_POWER_LED | ||||||
| from ...interface import dbus_property | from ...interface import dbus_property | ||||||
| from .const import BOARD_NAME_YELLOW | from .const import BOARD_NAME_YELLOW | ||||||
| from .interface import BoardProxy | from .interface import BoardProxy | ||||||
|  | from .validate import SCHEMA_YELLOW_BOARD | ||||||
|  |  | ||||||
|  |  | ||||||
| class Yellow(BoardProxy): | class Yellow(BoardProxy): | ||||||
| @@ -13,7 +17,7 @@ class Yellow(BoardProxy): | |||||||
|  |  | ||||||
|     def __init__(self) -> None: |     def __init__(self) -> None: | ||||||
|         """Initialize properties.""" |         """Initialize properties.""" | ||||||
|         super().__init__(BOARD_NAME_YELLOW) |         super().__init__(BOARD_NAME_YELLOW, SCHEMA_YELLOW_BOARD) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     @dbus_property |     @dbus_property | ||||||
| @@ -24,6 +28,7 @@ class Yellow(BoardProxy): | |||||||
|     @heartbeat_led.setter |     @heartbeat_led.setter | ||||||
|     def heartbeat_led(self, enabled: bool) -> None: |     def heartbeat_led(self, enabled: bool) -> None: | ||||||
|         """Enable/disable heartbeat LED.""" |         """Enable/disable heartbeat LED.""" | ||||||
|  |         self._data[ATTR_HEARTBEAT_LED] = enabled | ||||||
|         asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled)) |         asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled)) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -35,6 +40,7 @@ class Yellow(BoardProxy): | |||||||
|     @power_led.setter |     @power_led.setter | ||||||
|     def power_led(self, enabled: bool) -> None: |     def power_led(self, enabled: bool) -> None: | ||||||
|         """Enable/disable power LED.""" |         """Enable/disable power LED.""" | ||||||
|  |         self._data[ATTR_POWER_LED] = enabled | ||||||
|         asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled)) |         asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled)) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -46,4 +52,14 @@ class Yellow(BoardProxy): | |||||||
|     @disk_led.setter |     @disk_led.setter | ||||||
|     def disk_led(self, enabled: bool) -> None: |     def disk_led(self, enabled: bool) -> None: | ||||||
|         """Enable/disable disk LED.""" |         """Enable/disable disk LED.""" | ||||||
|  |         self._data[ATTR_DISK_LED] = enabled | ||||||
|         asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled)) |         asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled)) | ||||||
|  |  | ||||||
|  |     async def connect(self, bus: MessageBus) -> None: | ||||||
|  |         """Connect to D-Bus.""" | ||||||
|  |         await super().connect(bus) | ||||||
|  |  | ||||||
|  |         # Set LEDs based on settings on connect | ||||||
|  |         self.disk_led = self._data[ATTR_DISK_LED] | ||||||
|  |         self.heartbeat_led = self._data[ATTR_HEARTBEAT_LED] | ||||||
|  |         self.power_led = self._data[ATTR_POWER_LED] | ||||||
|   | |||||||
| @@ -64,6 +64,7 @@ DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager" | |||||||
| DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint" | DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint" | ||||||
| DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection" | DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection" | ||||||
| DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections" | DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections" | ||||||
|  | DBUS_ATTR_ACTIVITY_LED = "ActivityLED" | ||||||
| DBUS_ATTR_ADDRESS_DATA = "AddressData" | DBUS_ATTR_ADDRESS_DATA = "AddressData" | ||||||
| DBUS_ATTR_BITRATE = "Bitrate" | DBUS_ATTR_BITRATE = "Bitrate" | ||||||
| DBUS_ATTR_BOARD = "Board" | DBUS_ATTR_BOARD = "Board" | ||||||
| @@ -169,6 +170,7 @@ DBUS_ATTR_TIMEUSEC = "TimeUSec" | |||||||
| DBUS_ATTR_TIMEZONE = "Timezone" | DBUS_ATTR_TIMEZONE = "Timezone" | ||||||
| DBUS_ATTR_TRANSACTION_STATISTICS = "TransactionStatistics" | DBUS_ATTR_TRANSACTION_STATISTICS = "TransactionStatistics" | ||||||
| DBUS_ATTR_TYPE = "Type" | DBUS_ATTR_TYPE = "Type" | ||||||
|  | DBUS_ATTR_USER_LED = "UserLED" | ||||||
| DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC = "UserspaceTimestampMonotonic" | DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC = "UserspaceTimestampMonotonic" | ||||||
| DBUS_ATTR_UUID_UPPERCASE = "UUID" | DBUS_ATTR_UUID_UPPERCASE = "UUID" | ||||||
| DBUS_ATTR_UUID = "Uuid" | DBUS_ATTR_UUID = "Uuid" | ||||||
|   | |||||||
| @@ -3,7 +3,7 @@ import logging | |||||||
|  |  | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ..exceptions import DBusError, DBusInterfaceError | from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from .const import ( | from .const import ( | ||||||
|     DBUS_ATTR_CHASSIS, |     DBUS_ATTR_CHASSIS, | ||||||
|     DBUS_ATTR_DEPLOYMENT, |     DBUS_ATTR_DEPLOYMENT, | ||||||
| @@ -39,7 +39,7 @@ class Hostname(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to systemd-hostname") |             _LOGGER.warning("Can't connect to systemd-hostname") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No hostname support on the host. Hostname functions have been disabled." |                 "No hostname support on the host. Hostname functions have been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -3,7 +3,7 @@ import logging | |||||||
|  |  | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ..exceptions import DBusError, DBusInterfaceError | from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from .const import DBUS_NAME_LOGIND, DBUS_OBJECT_LOGIND | from .const import DBUS_NAME_LOGIND, DBUS_OBJECT_LOGIND | ||||||
| from .interface import DBusInterface | from .interface import DBusInterface | ||||||
| from .utils import dbus_connected | from .utils import dbus_connected | ||||||
| @@ -28,8 +28,8 @@ class Logind(DBusInterface): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to systemd-logind") |             _LOGGER.warning("Can't connect to systemd-logind") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.info("No systemd-logind support on the host.") |             _LOGGER.warning("No systemd-logind support on the host.") | ||||||
|  |  | ||||||
|     @dbus_connected |     @dbus_connected | ||||||
|     async def reboot(self) -> None: |     async def reboot(self) -> None: | ||||||
|   | |||||||
| @@ -9,6 +9,8 @@ from ...exceptions import ( | |||||||
|     DBusError, |     DBusError, | ||||||
|     DBusFatalError, |     DBusFatalError, | ||||||
|     DBusInterfaceError, |     DBusInterfaceError, | ||||||
|  |     DBusNoReplyError, | ||||||
|  |     DBusServiceUnkownError, | ||||||
|     HostNotSupportedError, |     HostNotSupportedError, | ||||||
|     NetworkInterfaceNotFound, |     NetworkInterfaceNotFound, | ||||||
| ) | ) | ||||||
| @@ -143,7 +145,7 @@ class NetworkManager(DBusInterfaceProxy): | |||||||
|             await self.settings.connect(bus) |             await self.settings.connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to Network Manager") |             _LOGGER.warning("Can't connect to Network Manager") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No Network Manager support on the host. Local network functions have been disabled." |                 "No Network Manager support on the host. Local network functions have been disabled." | ||||||
|             ) |             ) | ||||||
| @@ -210,8 +212,22 @@ class NetworkManager(DBusInterfaceProxy): | |||||||
|                     # try to query it. Ignore those cases. |                     # try to query it. Ignore those cases. | ||||||
|                     _LOGGER.debug("Can't process %s: %s", device, err) |                     _LOGGER.debug("Can't process %s: %s", device, err) | ||||||
|                     continue |                     continue | ||||||
|  |                 except ( | ||||||
|  |                     DBusNoReplyError, | ||||||
|  |                     DBusServiceUnkownError, | ||||||
|  |                 ) as err: | ||||||
|  |                     # This typically means that NetworkManager disappeared. Give up immeaditly. | ||||||
|  |                     _LOGGER.error( | ||||||
|  |                         "NetworkManager not responding while processing %s: %s. Giving up.", | ||||||
|  |                         device, | ||||||
|  |                         err, | ||||||
|  |                     ) | ||||||
|  |                     capture_exception(err) | ||||||
|  |                     return | ||||||
|                 except Exception as err:  # pylint: disable=broad-except |                 except Exception as err:  # pylint: disable=broad-except | ||||||
|                     _LOGGER.exception("Error while processing %s: %s", device, err) |                     _LOGGER.exception( | ||||||
|  |                         "Unkown error while processing %s: %s", device, err | ||||||
|  |                     ) | ||||||
|                     capture_exception(err) |                     capture_exception(err) | ||||||
|                     continue |                     continue | ||||||
|  |  | ||||||
|   | |||||||
| @@ -12,7 +12,7 @@ from ...const import ( | |||||||
|     ATTR_PRIORITY, |     ATTR_PRIORITY, | ||||||
|     ATTR_VPN, |     ATTR_VPN, | ||||||
| ) | ) | ||||||
| from ...exceptions import DBusError, DBusInterfaceError | from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     DBUS_ATTR_CONFIGURATION, |     DBUS_ATTR_CONFIGURATION, | ||||||
|     DBUS_ATTR_MODE, |     DBUS_ATTR_MODE, | ||||||
| @@ -67,7 +67,7 @@ class NetworkManagerDNS(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to DnsManager") |             _LOGGER.warning("Can't connect to DnsManager") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No DnsManager support on the host. Local DNS functions have been disabled." |                 "No DnsManager support on the host. Local DNS functions have been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -2,7 +2,7 @@ | |||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
| import socket | import socket | ||||||
| from typing import TYPE_CHECKING, Any | from typing import TYPE_CHECKING | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from dbus_fast import Variant | from dbus_fast import Variant | ||||||
| @@ -19,6 +19,7 @@ from . import ( | |||||||
|     CONF_ATTR_PATH, |     CONF_ATTR_PATH, | ||||||
|     CONF_ATTR_VLAN, |     CONF_ATTR_VLAN, | ||||||
| ) | ) | ||||||
|  | from .. import NetworkManager | ||||||
| from ....host.const import InterfaceMethod, InterfaceType | from ....host.const import InterfaceMethod, InterfaceType | ||||||
|  |  | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
| @@ -26,8 +27,11 @@ if TYPE_CHECKING: | |||||||
|  |  | ||||||
|  |  | ||||||
| def get_connection_from_interface( | def get_connection_from_interface( | ||||||
|     interface: Interface, name: str | None = None, uuid: str | None = None |     interface: Interface, | ||||||
| ) -> Any: |     network_manager: NetworkManager, | ||||||
|  |     name: str | None = None, | ||||||
|  |     uuid: str | None = None, | ||||||
|  | ) -> dict[str, dict[str, Variant]]: | ||||||
|     """Generate message argument for network interface update.""" |     """Generate message argument for network interface update.""" | ||||||
|  |  | ||||||
|     # Generate/Update ID/name |     # Generate/Update ID/name | ||||||
| @@ -121,9 +125,15 @@ def get_connection_from_interface( | |||||||
|     if interface.type == InterfaceType.ETHERNET: |     if interface.type == InterfaceType.ETHERNET: | ||||||
|         conn[CONF_ATTR_802_ETHERNET] = {ATTR_ASSIGNED_MAC: Variant("s", "preserve")} |         conn[CONF_ATTR_802_ETHERNET] = {ATTR_ASSIGNED_MAC: Variant("s", "preserve")} | ||||||
|     elif interface.type == "vlan": |     elif interface.type == "vlan": | ||||||
|  |         parent = interface.vlan.interface | ||||||
|  |         if parent in network_manager and ( | ||||||
|  |             parent_connection := network_manager.get(parent).connection | ||||||
|  |         ): | ||||||
|  |             parent = parent_connection.uuid | ||||||
|  |  | ||||||
|         conn[CONF_ATTR_VLAN] = { |         conn[CONF_ATTR_VLAN] = { | ||||||
|             "id": Variant("u", interface.vlan.id), |             "id": Variant("u", interface.vlan.id), | ||||||
|             "parent": Variant("s", interface.vlan.interface), |             "parent": Variant("s", parent), | ||||||
|         } |         } | ||||||
|     elif interface.type == InterfaceType.WIRELESS: |     elif interface.type == InterfaceType.WIRELESS: | ||||||
|         wireless = { |         wireless = { | ||||||
| @@ -138,8 +148,8 @@ def get_connection_from_interface( | |||||||
|             wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY) |             wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY) | ||||||
|             wireless_security = {} |             wireless_security = {} | ||||||
|             if interface.wifi.auth == "wep": |             if interface.wifi.auth == "wep": | ||||||
|                 wireless_security["auth-alg"] = Variant("s", "none") |                 wireless_security["auth-alg"] = Variant("s", "open") | ||||||
|                 wireless_security["key-mgmt"] = Variant("s", "open") |                 wireless_security["key-mgmt"] = Variant("s", "none") | ||||||
|             elif interface.wifi.auth == "wpa-psk": |             elif interface.wifi.auth == "wpa-psk": | ||||||
|                 wireless_security["auth-alg"] = Variant("s", "open") |                 wireless_security["auth-alg"] = Variant("s", "open") | ||||||
|                 wireless_security["key-mgmt"] = Variant("s", "wpa-psk") |                 wireless_security["key-mgmt"] = Variant("s", "wpa-psk") | ||||||
|   | |||||||
| @@ -4,7 +4,7 @@ from typing import Any | |||||||
|  |  | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ...exceptions import DBusError, DBusInterfaceError | from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from ..const import DBUS_NAME_NM, DBUS_OBJECT_SETTINGS | from ..const import DBUS_NAME_NM, DBUS_OBJECT_SETTINGS | ||||||
| from ..interface import DBusInterface | from ..interface import DBusInterface | ||||||
| from ..network.setting import NetworkSetting | from ..network.setting import NetworkSetting | ||||||
| @@ -28,7 +28,7 @@ class NetworkManagerSettings(DBusInterface): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to Network Manager Settings") |             _LOGGER.warning("Can't connect to Network Manager Settings") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No Network Manager Settings support on the host. Local network functions have been disabled." |                 "No Network Manager Settings support on the host. Local network functions have been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -4,7 +4,7 @@ from typing import Any | |||||||
|  |  | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ..exceptions import DBusError, DBusInterfaceError | from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from ..utils.dbus import DBusSignalWrapper | from ..utils.dbus import DBusSignalWrapper | ||||||
| from .const import ( | from .const import ( | ||||||
|     DBUS_ATTR_BOOT_SLOT, |     DBUS_ATTR_BOOT_SLOT, | ||||||
| @@ -49,7 +49,7 @@ class Rauc(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to rauc") |             _LOGGER.warning("Can't connect to rauc") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning("Host has no rauc support. OTA updates have been disabled.") |             _LOGGER.warning("Host has no rauc support. OTA updates have been disabled.") | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|   | |||||||
| @@ -5,7 +5,7 @@ import logging | |||||||
|  |  | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ..exceptions import DBusError, DBusInterfaceError | from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from .const import ( | from .const import ( | ||||||
|     DBUS_ATTR_CACHE_STATISTICS, |     DBUS_ATTR_CACHE_STATISTICS, | ||||||
|     DBUS_ATTR_CURRENT_DNS_SERVER, |     DBUS_ATTR_CURRENT_DNS_SERVER, | ||||||
| @@ -59,7 +59,7 @@ class Resolved(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to systemd-resolved.") |             _LOGGER.warning("Can't connect to systemd-resolved.") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "Host has no systemd-resolved support. DNS will not work correctly." |                 "Host has no systemd-resolved support. DNS will not work correctly." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -10,6 +10,7 @@ from ..exceptions import ( | |||||||
|     DBusError, |     DBusError, | ||||||
|     DBusFatalError, |     DBusFatalError, | ||||||
|     DBusInterfaceError, |     DBusInterfaceError, | ||||||
|  |     DBusServiceUnkownError, | ||||||
|     DBusSystemdNoSuchUnit, |     DBusSystemdNoSuchUnit, | ||||||
| ) | ) | ||||||
| from .const import ( | from .const import ( | ||||||
| @@ -86,7 +87,7 @@ class Systemd(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to systemd") |             _LOGGER.warning("Can't connect to systemd") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No systemd support on the host. Host control has been disabled." |                 "No systemd support on the host. Host control has been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -4,7 +4,7 @@ import logging | |||||||
|  |  | ||||||
| from dbus_fast.aio.message_bus import MessageBus | from dbus_fast.aio.message_bus import MessageBus | ||||||
|  |  | ||||||
| from ..exceptions import DBusError, DBusInterfaceError | from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError | ||||||
| from ..utils.dt import utc_from_timestamp | from ..utils.dt import utc_from_timestamp | ||||||
| from .const import ( | from .const import ( | ||||||
|     DBUS_ATTR_NTP, |     DBUS_ATTR_NTP, | ||||||
| @@ -63,7 +63,7 @@ class TimeDate(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to systemd-timedate") |             _LOGGER.warning("Can't connect to systemd-timedate") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No timedate support on the host. Time/Date functions have been disabled." |                 "No timedate support on the host. Time/Date functions have been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -6,7 +6,12 @@ from typing import Any | |||||||
| from awesomeversion import AwesomeVersion | from awesomeversion import AwesomeVersion | ||||||
| from dbus_fast.aio import MessageBus | from dbus_fast.aio import MessageBus | ||||||
|  |  | ||||||
| from ...exceptions import DBusError, DBusInterfaceError, DBusObjectError | from ...exceptions import ( | ||||||
|  |     DBusError, | ||||||
|  |     DBusInterfaceError, | ||||||
|  |     DBusObjectError, | ||||||
|  |     DBusServiceUnkownError, | ||||||
|  | ) | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     DBUS_ATTR_SUPPORTED_FILESYSTEMS, |     DBUS_ATTR_SUPPORTED_FILESYSTEMS, | ||||||
|     DBUS_ATTR_VERSION, |     DBUS_ATTR_VERSION, | ||||||
| @@ -45,7 +50,7 @@ class UDisks2(DBusInterfaceProxy): | |||||||
|             await super().connect(bus) |             await super().connect(bus) | ||||||
|         except DBusError: |         except DBusError: | ||||||
|             _LOGGER.warning("Can't connect to udisks2") |             _LOGGER.warning("Can't connect to udisks2") | ||||||
|         except DBusInterfaceError: |         except (DBusServiceUnkownError, DBusInterfaceError): | ||||||
|             _LOGGER.warning( |             _LOGGER.warning( | ||||||
|                 "No udisks2 support on the host. Host control has been disabled." |                 "No udisks2 support on the host. Host control has been disabled." | ||||||
|             ) |             ) | ||||||
|   | |||||||
| @@ -3,10 +3,9 @@ | |||||||
| from dataclasses import dataclass | from dataclasses import dataclass | ||||||
| from inspect import get_annotations | from inspect import get_annotations | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from typing import Any, TypedDict | from typing import Any, NotRequired, TypedDict | ||||||
|  |  | ||||||
| from dbus_fast import Variant | from dbus_fast import Variant | ||||||
| from typing_extensions import NotRequired |  | ||||||
|  |  | ||||||
| from .const import EncryptType, EraseMode | from .const import EncryptType, EraseMode | ||||||
|  |  | ||||||
|   | |||||||
| @@ -15,15 +15,10 @@ from docker.types import Mount | |||||||
| import requests | import requests | ||||||
|  |  | ||||||
| from ..addons.build import AddonBuild | from ..addons.build import AddonBuild | ||||||
|  | from ..addons.const import MappingType | ||||||
| from ..bus import EventListener | from ..bus import EventListener | ||||||
| from ..const import ( | from ..const import ( | ||||||
|     DOCKER_CPU_RUNTIME_ALLOCATION, |     DOCKER_CPU_RUNTIME_ALLOCATION, | ||||||
|     MAP_ADDONS, |  | ||||||
|     MAP_BACKUP, |  | ||||||
|     MAP_CONFIG, |  | ||||||
|     MAP_MEDIA, |  | ||||||
|     MAP_SHARE, |  | ||||||
|     MAP_SSL, |  | ||||||
|     SECURITY_DISABLE, |     SECURITY_DISABLE, | ||||||
|     SECURITY_PROFILE, |     SECURITY_PROFILE, | ||||||
|     SYSTEMD_JOURNAL_PERSISTENT, |     SYSTEMD_JOURNAL_PERSISTENT, | ||||||
| @@ -329,75 +324,117 @@ class DockerAddon(DockerInterface): | |||||||
|         """Return mounts for container.""" |         """Return mounts for container.""" | ||||||
|         addon_mapping = self.addon.map_volumes |         addon_mapping = self.addon.map_volumes | ||||||
|  |  | ||||||
|  |         target_data_path = "" | ||||||
|  |         if MappingType.DATA in addon_mapping: | ||||||
|  |             target_data_path = addon_mapping[MappingType.DATA].path | ||||||
|  |  | ||||||
|         mounts = [ |         mounts = [ | ||||||
|             MOUNT_DEV, |             MOUNT_DEV, | ||||||
|             Mount( |             Mount( | ||||||
|                 type=MountType.BIND, |                 type=MountType.BIND, | ||||||
|                 source=self.addon.path_extern_data.as_posix(), |                 source=self.addon.path_extern_data.as_posix(), | ||||||
|                 target="/data", |                 target=target_data_path or "/data", | ||||||
|                 read_only=False, |                 read_only=False, | ||||||
|             ), |             ), | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
|         # setup config mappings |         # setup config mappings | ||||||
|         if MAP_CONFIG in addon_mapping: |         if MappingType.CONFIG in addon_mapping: | ||||||
|             mounts.append( |             mounts.append( | ||||||
|                 Mount( |                 Mount( | ||||||
|                     type=MountType.BIND, |                     type=MountType.BIND, | ||||||
|                     source=self.sys_config.path_extern_homeassistant.as_posix(), |                     source=self.sys_config.path_extern_homeassistant.as_posix(), | ||||||
|                     target="/config", |                     target=addon_mapping[MappingType.CONFIG].path or "/config", | ||||||
|                     read_only=addon_mapping[MAP_CONFIG], |                     read_only=addon_mapping[MappingType.CONFIG].read_only, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if MAP_SSL in addon_mapping: |         else: | ||||||
|  |             # Map addon's public config folder if not using deprecated config option | ||||||
|  |             if self.addon.addon_config_used: | ||||||
|  |                 mounts.append( | ||||||
|  |                     Mount( | ||||||
|  |                         type=MountType.BIND, | ||||||
|  |                         source=self.addon.path_extern_config.as_posix(), | ||||||
|  |                         target=addon_mapping[MappingType.ADDON_CONFIG].path | ||||||
|  |                         or "/config", | ||||||
|  |                         read_only=addon_mapping[MappingType.ADDON_CONFIG].read_only, | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |             # Map Home Assistant config in new way | ||||||
|  |             if MappingType.HOMEASSISTANT_CONFIG in addon_mapping: | ||||||
|  |                 mounts.append( | ||||||
|  |                     Mount( | ||||||
|  |                         type=MountType.BIND, | ||||||
|  |                         source=self.sys_config.path_extern_homeassistant.as_posix(), | ||||||
|  |                         target=addon_mapping[MappingType.HOMEASSISTANT_CONFIG].path | ||||||
|  |                         or "/homeassistant", | ||||||
|  |                         read_only=addon_mapping[ | ||||||
|  |                             MappingType.HOMEASSISTANT_CONFIG | ||||||
|  |                         ].read_only, | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |         if MappingType.ALL_ADDON_CONFIGS in addon_mapping: | ||||||
|  |             mounts.append( | ||||||
|  |                 Mount( | ||||||
|  |                     type=MountType.BIND, | ||||||
|  |                     source=self.sys_config.path_extern_addon_configs.as_posix(), | ||||||
|  |                     target=addon_mapping[MappingType.ALL_ADDON_CONFIGS].path | ||||||
|  |                     or "/addon_configs", | ||||||
|  |                     read_only=addon_mapping[MappingType.ALL_ADDON_CONFIGS].read_only, | ||||||
|  |                 ) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         if MappingType.SSL in addon_mapping: | ||||||
|             mounts.append( |             mounts.append( | ||||||
|                 Mount( |                 Mount( | ||||||
|                     type=MountType.BIND, |                     type=MountType.BIND, | ||||||
|                     source=self.sys_config.path_extern_ssl.as_posix(), |                     source=self.sys_config.path_extern_ssl.as_posix(), | ||||||
|                     target="/ssl", |                     target=addon_mapping[MappingType.SSL].path or "/ssl", | ||||||
|                     read_only=addon_mapping[MAP_SSL], |                     read_only=addon_mapping[MappingType.SSL].read_only, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if MAP_ADDONS in addon_mapping: |         if MappingType.ADDONS in addon_mapping: | ||||||
|             mounts.append( |             mounts.append( | ||||||
|                 Mount( |                 Mount( | ||||||
|                     type=MountType.BIND, |                     type=MountType.BIND, | ||||||
|                     source=self.sys_config.path_extern_addons_local.as_posix(), |                     source=self.sys_config.path_extern_addons_local.as_posix(), | ||||||
|                     target="/addons", |                     target=addon_mapping[MappingType.ADDONS].path or "/addons", | ||||||
|                     read_only=addon_mapping[MAP_ADDONS], |                     read_only=addon_mapping[MappingType.ADDONS].read_only, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if MAP_BACKUP in addon_mapping: |         if MappingType.BACKUP in addon_mapping: | ||||||
|             mounts.append( |             mounts.append( | ||||||
|                 Mount( |                 Mount( | ||||||
|                     type=MountType.BIND, |                     type=MountType.BIND, | ||||||
|                     source=self.sys_config.path_extern_backup.as_posix(), |                     source=self.sys_config.path_extern_backup.as_posix(), | ||||||
|                     target="/backup", |                     target=addon_mapping[MappingType.BACKUP].path or "/backup", | ||||||
|                     read_only=addon_mapping[MAP_BACKUP], |                     read_only=addon_mapping[MappingType.BACKUP].read_only, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if MAP_SHARE in addon_mapping: |         if MappingType.SHARE in addon_mapping: | ||||||
|             mounts.append( |             mounts.append( | ||||||
|                 Mount( |                 Mount( | ||||||
|                     type=MountType.BIND, |                     type=MountType.BIND, | ||||||
|                     source=self.sys_config.path_extern_share.as_posix(), |                     source=self.sys_config.path_extern_share.as_posix(), | ||||||
|                     target="/share", |                     target=addon_mapping[MappingType.SHARE].path or "/share", | ||||||
|                     read_only=addon_mapping[MAP_SHARE], |                     read_only=addon_mapping[MappingType.SHARE].read_only, | ||||||
|                     propagation=PropagationMode.RSLAVE, |                     propagation=PropagationMode.RSLAVE, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if MAP_MEDIA in addon_mapping: |         if MappingType.MEDIA in addon_mapping: | ||||||
|             mounts.append( |             mounts.append( | ||||||
|                 Mount( |                 Mount( | ||||||
|                     type=MountType.BIND, |                     type=MountType.BIND, | ||||||
|                     source=self.sys_config.path_extern_media.as_posix(), |                     source=self.sys_config.path_extern_media.as_posix(), | ||||||
|                     target="/media", |                     target=addon_mapping[MappingType.MEDIA].path or "/media", | ||||||
|                     read_only=addon_mapping[MAP_MEDIA], |                     read_only=addon_mapping[MappingType.MEDIA].read_only, | ||||||
|                     propagation=PropagationMode.RSLAVE, |                     propagation=PropagationMode.RSLAVE, | ||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
| @@ -501,24 +538,16 @@ class DockerAddon(DockerInterface): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Security check |         # Security check | ||||||
|         if not self.addon.protected: |         if not self.addon.protected: | ||||||
|             _LOGGER.warning("%s running with disabled protected mode!", self.addon.name) |             _LOGGER.warning("%s running with disabled protected mode!", self.addon.name) | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Don't set a hostname if no separate UTS namespace is used |         # Don't set a hostname if no separate UTS namespace is used | ||||||
|         hostname = None if self.uts_mode else self.addon.hostname |         hostname = None if self.uts_mode else self.addon.hostname | ||||||
|  |  | ||||||
|         # Create & Run container |         # Create & Run container | ||||||
|         try: |         try: | ||||||
|             docker_container = await self.sys_run_in_executor( |             await self._run( | ||||||
|                 self.sys_docker.run, |  | ||||||
|                 self.image, |  | ||||||
|                 tag=str(self.addon.version), |                 tag=str(self.addon.version), | ||||||
|                 name=self.name, |                 name=self.name, | ||||||
|                 hostname=hostname, |                 hostname=hostname, | ||||||
| @@ -549,7 +578,6 @@ class DockerAddon(DockerInterface): | |||||||
|             ) |             ) | ||||||
|             raise |             raise | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting Docker add-on %s with version %s", self.image, self.version |             "Starting Docker add-on %s with version %s", self.image, self.version | ||||||
|         ) |         ) | ||||||
| @@ -575,7 +603,11 @@ class DockerAddon(DockerInterface): | |||||||
|         on_condition=DockerJobError, |         on_condition=DockerJobError, | ||||||
|     ) |     ) | ||||||
|     async def update( |     async def update( | ||||||
|         self, version: AwesomeVersion, image: str | None = None, latest: bool = False |         self, | ||||||
|  |         version: AwesomeVersion, | ||||||
|  |         image: str | None = None, | ||||||
|  |         latest: bool = False, | ||||||
|  |         arch: CpuArch | None = None, | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         """Update a docker image.""" |         """Update a docker image.""" | ||||||
|         image = image or self.image |         image = image or self.image | ||||||
| @@ -586,13 +618,13 @@ class DockerAddon(DockerInterface): | |||||||
|  |  | ||||||
|         # Update docker image |         # Update docker image | ||||||
|         await self.install( |         await self.install( | ||||||
|             version, image=image, latest=latest, need_build=self.addon.latest_need_build |             version, | ||||||
|  |             image=image, | ||||||
|  |             latest=latest, | ||||||
|  |             arch=arch, | ||||||
|  |             need_build=self.addon.latest_need_build, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         # Stop container & cleanup |  | ||||||
|         with suppress(DockerError): |  | ||||||
|             await self.stop() |  | ||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
|         name="docker_addon_install", |         name="docker_addon_install", | ||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
|   | |||||||
| @@ -92,16 +92,7 @@ class DockerAudio(DockerInterface, CoreSysAttributes): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |         await self._run( | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Create & Run container |  | ||||||
|         docker_container = await self.sys_run_in_executor( |  | ||||||
|             self.sys_docker.run, |  | ||||||
|             self.image, |  | ||||||
|             tag=str(self.sys_plugins.audio.version), |             tag=str(self.sys_plugins.audio.version), | ||||||
|             init=False, |             init=False, | ||||||
|             ipv4=self.sys_docker.network.audio, |             ipv4=self.sys_docker.network.audio, | ||||||
| @@ -118,8 +109,6 @@ class DockerAudio(DockerInterface, CoreSysAttributes): | |||||||
|             }, |             }, | ||||||
|             mounts=self.mounts, |             mounts=self.mounts, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting Audio %s with version %s - %s", |             "Starting Audio %s with version %s - %s", | ||||||
|             self.image, |             self.image, | ||||||
|   | |||||||
| @@ -33,16 +33,7 @@ class DockerCli(DockerInterface, CoreSysAttributes): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |         await self._run( | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Create & Run container |  | ||||||
|         docker_container = await self.sys_run_in_executor( |  | ||||||
|             self.sys_docker.run, |  | ||||||
|             self.image, |  | ||||||
|             entrypoint=["/init"], |             entrypoint=["/init"], | ||||||
|             tag=str(self.sys_plugins.cli.version), |             tag=str(self.sys_plugins.cli.version), | ||||||
|             init=False, |             init=False, | ||||||
| @@ -60,8 +51,6 @@ class DockerCli(DockerInterface, CoreSysAttributes): | |||||||
|                 ENV_TOKEN: self.sys_plugins.cli.supervisor_token, |                 ENV_TOKEN: self.sys_plugins.cli.supervisor_token, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting CLI %s with version %s - %s", |             "Starting CLI %s with version %s - %s", | ||||||
|             self.image, |             self.image, | ||||||
|   | |||||||
| @@ -35,16 +35,7 @@ class DockerDNS(DockerInterface, CoreSysAttributes): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |         await self._run( | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Create & Run container |  | ||||||
|         docker_container = await self.sys_run_in_executor( |  | ||||||
|             self.sys_docker.run, |  | ||||||
|             self.image, |  | ||||||
|             tag=str(self.sys_plugins.dns.version), |             tag=str(self.sys_plugins.dns.version), | ||||||
|             init=False, |             init=False, | ||||||
|             dns=False, |             dns=False, | ||||||
| @@ -65,8 +56,6 @@ class DockerDNS(DockerInterface, CoreSysAttributes): | |||||||
|             ], |             ], | ||||||
|             oom_score_adj=-300, |             oom_score_adj=-300, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting DNS %s with version %s - %s", |             "Starting DNS %s with version %s - %s", | ||||||
|             self.image, |             self.image, | ||||||
|   | |||||||
| @@ -53,9 +53,10 @@ class DockerHomeAssistant(DockerInterface): | |||||||
|     @property |     @property | ||||||
|     def timeout(self) -> int: |     def timeout(self) -> int: | ||||||
|         """Return timeout for Docker actions.""" |         """Return timeout for Docker actions.""" | ||||||
|         # Synchronized homeassistant's S6_SERVICES_GRACETIME |         # Synchronized with the homeassistant core container's S6_SERVICES_GRACETIME | ||||||
|         # to avoid killing Home Assistant Core |         # to avoid killing Home Assistant Core, see | ||||||
|         return 220 + 20 |         # https://github.com/home-assistant/core/tree/dev/Dockerfile | ||||||
|  |         return 240 + 20 | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def ip_address(self) -> IPv4Address: |     def ip_address(self) -> IPv4Address: | ||||||
| @@ -152,16 +153,7 @@ class DockerHomeAssistant(DockerInterface): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |         await self._run( | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Create & Run container |  | ||||||
|         docker_container = await self.sys_run_in_executor( |  | ||||||
|             self.sys_docker.run, |  | ||||||
|             self.image, |  | ||||||
|             tag=(self.sys_homeassistant.version), |             tag=(self.sys_homeassistant.version), | ||||||
|             name=self.name, |             name=self.name, | ||||||
|             hostname=self.name, |             hostname=self.name, | ||||||
| @@ -186,8 +178,6 @@ class DockerHomeAssistant(DockerInterface): | |||||||
|             tmpfs={"/tmp": ""}, |             tmpfs={"/tmp": ""}, | ||||||
|             oom_score_adj=-300, |             oom_score_adj=-300, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting Home Assistant %s with version %s", self.image, self.version |             "Starting Home Assistant %s with version %s", self.image, self.version | ||||||
|         ) |         ) | ||||||
|   | |||||||
| @@ -1,7 +1,6 @@ | |||||||
| """Interface class for Supervisor Docker object.""" | """Interface class for Supervisor Docker object.""" | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
| import asyncio |  | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
| from collections.abc import Awaitable | from collections.abc import Awaitable | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
| @@ -92,7 +91,6 @@ class DockerInterface(JobGroup): | |||||||
|         ) |         ) | ||||||
|         self.coresys: CoreSys = coresys |         self.coresys: CoreSys = coresys | ||||||
|         self._meta: dict[str, Any] | None = None |         self._meta: dict[str, Any] | None = None | ||||||
|         self.lock: asyncio.Lock = asyncio.Lock() |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def timeout(self) -> int: |     def timeout(self) -> int: | ||||||
| @@ -153,7 +151,7 @@ class DockerInterface(JobGroup): | |||||||
|     @property |     @property | ||||||
|     def in_progress(self) -> bool: |     def in_progress(self) -> bool: | ||||||
|         """Return True if a task is in progress.""" |         """Return True if a task is in progress.""" | ||||||
|         return self.lock.locked() |         return self.active_job | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def restart_policy(self) -> RestartPolicy | None: |     def restart_policy(self) -> RestartPolicy | None: | ||||||
| @@ -379,6 +377,27 @@ class DockerInterface(JobGroup): | |||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         raise NotImplementedError() |         raise NotImplementedError() | ||||||
|  |  | ||||||
|  |     async def _run(self, **kwargs) -> None: | ||||||
|  |         """Run Docker image with retry inf necessary.""" | ||||||
|  |         if await self.is_running(): | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Cleanup | ||||||
|  |         await self.stop() | ||||||
|  |  | ||||||
|  |         # Create & Run container | ||||||
|  |         try: | ||||||
|  |             docker_container = await self.sys_run_in_executor( | ||||||
|  |                 self.sys_docker.run, self.image, **kwargs | ||||||
|  |             ) | ||||||
|  |         except DockerNotFound as err: | ||||||
|  |             # If image is missing, capture the exception as this shouldn't happen | ||||||
|  |             capture_exception(err) | ||||||
|  |             raise | ||||||
|  |  | ||||||
|  |         # Store metadata | ||||||
|  |         self._meta = docker_container.attrs | ||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
|         name="docker_interface_stop", |         name="docker_interface_stop", | ||||||
|         limit=JobExecutionLimit.GROUP_ONCE, |         limit=JobExecutionLimit.GROUP_ONCE, | ||||||
| @@ -451,12 +470,17 @@ class DockerInterface(JobGroup): | |||||||
|         return b"" |         return b"" | ||||||
|  |  | ||||||
|     @Job(name="docker_interface_cleanup", limit=JobExecutionLimit.GROUP_WAIT) |     @Job(name="docker_interface_cleanup", limit=JobExecutionLimit.GROUP_WAIT) | ||||||
|     def cleanup(self, old_image: str | None = None) -> Awaitable[None]: |     def cleanup( | ||||||
|  |         self, | ||||||
|  |         old_image: str | None = None, | ||||||
|  |         image: str | None = None, | ||||||
|  |         version: AwesomeVersion | None = None, | ||||||
|  |     ) -> Awaitable[None]: | ||||||
|         """Check if old version exists and cleanup.""" |         """Check if old version exists and cleanup.""" | ||||||
|         return self.sys_run_in_executor( |         return self.sys_run_in_executor( | ||||||
|             self.sys_docker.cleanup_old_images, |             self.sys_docker.cleanup_old_images, | ||||||
|             self.image, |             image or self.image, | ||||||
|             self.version, |             version or self.version, | ||||||
|             {old_image} if old_image else None, |             {old_image} if old_image else None, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -38,16 +38,7 @@ class DockerMulticast(DockerInterface, CoreSysAttributes): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |         await self._run( | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Create & Run container |  | ||||||
|         docker_container = await self.sys_run_in_executor( |  | ||||||
|             self.sys_docker.run, |  | ||||||
|             self.image, |  | ||||||
|             tag=str(self.sys_plugins.multicast.version), |             tag=str(self.sys_plugins.multicast.version), | ||||||
|             init=False, |             init=False, | ||||||
|             name=self.name, |             name=self.name, | ||||||
| @@ -59,8 +50,6 @@ class DockerMulticast(DockerInterface, CoreSysAttributes): | |||||||
|             extra_hosts={"supervisor": self.sys_docker.network.supervisor}, |             extra_hosts={"supervisor": self.sys_docker.network.supervisor}, | ||||||
|             environment={ENV_TIME: self.sys_timezone}, |             environment={ENV_TIME: self.sys_timezone}, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting Multicast %s with version %s - Host", self.image, self.version |             "Starting Multicast %s with version %s - Host", self.image, self.version | ||||||
|         ) |         ) | ||||||
|   | |||||||
| @@ -35,16 +35,7 @@ class DockerObserver(DockerInterface, CoreSysAttributes): | |||||||
|     ) |     ) | ||||||
|     async def run(self) -> None: |     async def run(self) -> None: | ||||||
|         """Run Docker image.""" |         """Run Docker image.""" | ||||||
|         if await self.is_running(): |         await self._run( | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Cleanup |  | ||||||
|         await self.stop() |  | ||||||
|  |  | ||||||
|         # Create & Run container |  | ||||||
|         docker_container = await self.sys_run_in_executor( |  | ||||||
|             self.sys_docker.run, |  | ||||||
|             self.image, |  | ||||||
|             tag=str(self.sys_plugins.observer.version), |             tag=str(self.sys_plugins.observer.version), | ||||||
|             init=False, |             init=False, | ||||||
|             ipv4=self.sys_docker.network.observer, |             ipv4=self.sys_docker.network.observer, | ||||||
| @@ -63,8 +54,6 @@ class DockerObserver(DockerInterface, CoreSysAttributes): | |||||||
|             ports={"80/tcp": 4357}, |             ports={"80/tcp": 4357}, | ||||||
|             oom_score_adj=-300, |             oom_score_adj=-300, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         self._meta = docker_container.attrs |  | ||||||
|         _LOGGER.info( |         _LOGGER.info( | ||||||
|             "Starting Observer %s with version %s - %s", |             "Starting Observer %s with version %s - %s", | ||||||
|             self.image, |             self.image, | ||||||
|   | |||||||
| @@ -67,6 +67,10 @@ class HomeAssistantCrashError(HomeAssistantError): | |||||||
|     """Error on crash of a Home Assistant startup.""" |     """Error on crash of a Home Assistant startup.""" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class HomeAssistantStartupTimeout(HomeAssistantCrashError): | ||||||
|  |     """Timeout waiting for Home Assistant successful startup.""" | ||||||
|  |  | ||||||
|  |  | ||||||
| class HomeAssistantAPIError(HomeAssistantError): | class HomeAssistantAPIError(HomeAssistantError): | ||||||
|     """Home Assistant API exception.""" |     """Home Assistant API exception.""" | ||||||
|  |  | ||||||
| @@ -331,6 +335,10 @@ class DBusNotConnectedError(HostNotSupportedError): | |||||||
|     """D-Bus is not connected and call a method.""" |     """D-Bus is not connected and call a method.""" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DBusServiceUnkownError(HassioNotSupportedError): | ||||||
|  |     """D-Bus service was not available.""" | ||||||
|  |  | ||||||
|  |  | ||||||
| class DBusInterfaceError(HassioNotSupportedError): | class DBusInterfaceError(HassioNotSupportedError): | ||||||
|     """D-Bus interface not connected.""" |     """D-Bus interface not connected.""" | ||||||
|  |  | ||||||
| @@ -359,6 +367,10 @@ class DBusTimeoutError(DBusError): | |||||||
|     """D-Bus call timed out.""" |     """D-Bus call timed out.""" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DBusNoReplyError(DBusError): | ||||||
|  |     """D-Bus remote didn't reply/disconnected.""" | ||||||
|  |  | ||||||
|  |  | ||||||
| class DBusFatalError(DBusError): | class DBusFatalError(DBusError): | ||||||
|     """D-Bus call going wrong. |     """D-Bus call going wrong. | ||||||
|  |  | ||||||
| @@ -581,6 +593,10 @@ class HomeAssistantBackupError(BackupError, HomeAssistantError): | |||||||
|     """Raise if an error during Home Assistant Core backup is happening.""" |     """Raise if an error during Home Assistant Core backup is happening.""" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class BackupInvalidError(BackupError): | ||||||
|  |     """Raise if backup or password provided is invalid.""" | ||||||
|  |  | ||||||
|  |  | ||||||
| class BackupJobError(BackupError, JobException): | class BackupJobError(BackupError, JobException): | ||||||
|     """Raise on Backup job error.""" |     """Raise on Backup job error.""" | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| """Read hardware info from system.""" | """Read hardware info from system.""" | ||||||
| from datetime import datetime | from datetime import UTC, datetime | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import re | import re | ||||||
| @@ -55,7 +55,7 @@ class HwHelper(CoreSysAttributes): | |||||||
|             _LOGGER.error("Can't found last boot time!") |             _LOGGER.error("Can't found last boot time!") | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         return datetime.utcfromtimestamp(int(found.group(1))) |         return datetime.fromtimestamp(int(found.group(1)), UTC) | ||||||
|  |  | ||||||
|     def hide_virtual_device(self, udev_device: pyudev.Device) -> bool: |     def hide_virtual_device(self, udev_device: pyudev.Device) -> bool: | ||||||
|         """Small helper to hide not needed Devices.""" |         """Small helper to hide not needed Devices.""" | ||||||
|   | |||||||
| @@ -13,7 +13,7 @@ from ..coresys import CoreSys, CoreSysAttributes | |||||||
| from ..exceptions import HomeAssistantAPIError, HomeAssistantAuthError | from ..exceptions import HomeAssistantAPIError, HomeAssistantAuthError | ||||||
| from ..jobs.const import JobExecutionLimit | from ..jobs.const import JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
| from ..utils import check_port | from ..utils import check_port, version_is_new_enough | ||||||
| from .const import LANDINGPAGE | from .const import LANDINGPAGE | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
| @@ -107,7 +107,7 @@ class HomeAssistantAPI(CoreSysAttributes): | |||||||
|                         continue |                         continue | ||||||
|                     yield resp |                     yield resp | ||||||
|                     return |                     return | ||||||
|             except (asyncio.TimeoutError, aiohttp.ClientError) as err: |             except (TimeoutError, aiohttp.ClientError) as err: | ||||||
|                 _LOGGER.error("Error on call %s: %s", url, err) |                 _LOGGER.error("Error on call %s: %s", url, err) | ||||||
|                 break |                 break | ||||||
|  |  | ||||||
| @@ -130,34 +130,41 @@ class HomeAssistantAPI(CoreSysAttributes): | |||||||
|         """Return Home Assistant core state.""" |         """Return Home Assistant core state.""" | ||||||
|         return await self._get_json("api/core/state") |         return await self._get_json("api/core/state") | ||||||
|  |  | ||||||
|     async def check_api_state(self) -> bool: |     async def get_api_state(self) -> str | None: | ||||||
|         """Return True if Home Assistant up and running.""" |         """Return state of Home Assistant Core or None.""" | ||||||
|         # Skip check on landingpage |         # Skip check on landingpage | ||||||
|         if ( |         if ( | ||||||
|             self.sys_homeassistant.version is None |             self.sys_homeassistant.version is None | ||||||
|             or self.sys_homeassistant.version == LANDINGPAGE |             or self.sys_homeassistant.version == LANDINGPAGE | ||||||
|         ): |         ): | ||||||
|             return False |             return None | ||||||
|  |  | ||||||
|         # Check if port is up |         # Check if port is up | ||||||
|         if not await self.sys_run_in_executor( |         if not await check_port( | ||||||
|             check_port, |  | ||||||
|             self.sys_homeassistant.ip_address, |             self.sys_homeassistant.ip_address, | ||||||
|             self.sys_homeassistant.api_port, |             self.sys_homeassistant.api_port, | ||||||
|         ): |         ): | ||||||
|             return False |             return None | ||||||
|  |  | ||||||
|         # Check if API is up |         # Check if API is up | ||||||
|         with suppress(HomeAssistantAPIError): |         with suppress(HomeAssistantAPIError): | ||||||
|             # get_core_state is available since 2023.8.0 and preferred |             # get_core_state is available since 2023.8.0 and preferred | ||||||
|             # since it is significantly faster than get_config because |             # since it is significantly faster than get_config because | ||||||
|             # it does not require serializing the entire config |             # it does not require serializing the entire config | ||||||
|             if self.sys_homeassistant.version >= GET_CORE_STATE_MIN_VERSION: |             if version_is_new_enough( | ||||||
|  |                 self.sys_homeassistant.version, GET_CORE_STATE_MIN_VERSION | ||||||
|  |             ): | ||||||
|                 data = await self.get_core_state() |                 data = await self.get_core_state() | ||||||
|             else: |             else: | ||||||
|                 data = await self.get_config() |                 data = await self.get_config() | ||||||
|             # Older versions of home assistant does not expose the state |             # Older versions of home assistant does not expose the state | ||||||
|             if data and data.get("state", "RUNNING") == "RUNNING": |             if data: | ||||||
|                 return True |                 return data.get("state", "RUNNING") | ||||||
|  |  | ||||||
|  |         return None | ||||||
|  |  | ||||||
|  |     async def check_api_state(self) -> bool: | ||||||
|  |         """Return Home Assistant Core state if up.""" | ||||||
|  |         if state := await self.get_api_state(): | ||||||
|  |             return state == "RUNNING" | ||||||
|         return False |         return False | ||||||
|   | |||||||
| @@ -2,12 +2,14 @@ | |||||||
| import asyncio | import asyncio | ||||||
| from collections.abc import Awaitable | from collections.abc import Awaitable | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
|  | from dataclasses import dataclass | ||||||
|  | from datetime import datetime, timedelta | ||||||
| import logging | import logging | ||||||
| import re | import re | ||||||
| import secrets | import secrets | ||||||
| import shutil | import shutil | ||||||
|  | from typing import Final | ||||||
|  |  | ||||||
| import attr |  | ||||||
| from awesomeversion import AwesomeVersion | from awesomeversion import AwesomeVersion | ||||||
|  |  | ||||||
| from ..const import ATTR_HOMEASSISTANT, BusEvent | from ..const import ATTR_HOMEASSISTANT, BusEvent | ||||||
| @@ -21,6 +23,7 @@ from ..exceptions import ( | |||||||
|     HomeAssistantCrashError, |     HomeAssistantCrashError, | ||||||
|     HomeAssistantError, |     HomeAssistantError, | ||||||
|     HomeAssistantJobError, |     HomeAssistantJobError, | ||||||
|  |     HomeAssistantStartupTimeout, | ||||||
|     HomeAssistantUpdateError, |     HomeAssistantUpdateError, | ||||||
|     JobException, |     JobException, | ||||||
| ) | ) | ||||||
| @@ -40,15 +43,20 @@ from .const import ( | |||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  | SECONDS_BETWEEN_API_CHECKS: Final[int] = 5 | ||||||
|  | # Core Stage 1 and some wiggle room | ||||||
|  | STARTUP_API_RESPONSE_TIMEOUT: Final[timedelta] = timedelta(minutes=3) | ||||||
|  | # All stages plus event start timeout and some wiggle rooom | ||||||
|  | STARTUP_API_CHECK_RUNNING_TIMEOUT: Final[timedelta] = timedelta(minutes=15) | ||||||
| RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml") | RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml") | ||||||
|  |  | ||||||
|  |  | ||||||
| @attr.s(frozen=True) | @dataclass | ||||||
| class ConfigResult: | class ConfigResult: | ||||||
|     """Return object from config check.""" |     """Return object from config check.""" | ||||||
|  |  | ||||||
|     valid = attr.ib() |     valid: bool | ||||||
|     log = attr.ib() |     log: str | ||||||
|  |  | ||||||
|  |  | ||||||
| class HomeAssistantCore(JobGroup): | class HomeAssistantCore(JobGroup): | ||||||
| @@ -58,7 +66,6 @@ class HomeAssistantCore(JobGroup): | |||||||
|         """Initialize Home Assistant object.""" |         """Initialize Home Assistant object.""" | ||||||
|         super().__init__(coresys, JOB_GROUP_HOME_ASSISTANT_CORE) |         super().__init__(coresys, JOB_GROUP_HOME_ASSISTANT_CORE) | ||||||
|         self.instance: DockerHomeAssistant = DockerHomeAssistant(coresys) |         self.instance: DockerHomeAssistant = DockerHomeAssistant(coresys) | ||||||
|         self.lock: asyncio.Lock = asyncio.Lock() |  | ||||||
|         self._error_state: bool = False |         self._error_state: bool = False | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -122,7 +129,7 @@ class HomeAssistantCore(JobGroup): | |||||||
|         while True: |         while True: | ||||||
|             if not self.sys_updater.image_homeassistant: |             if not self.sys_updater.image_homeassistant: | ||||||
|                 _LOGGER.warning( |                 _LOGGER.warning( | ||||||
|                     "Found no information about Home Assistant. Retry in 30sec" |                     "Found no information about Home Assistant. Retrying in 30sec" | ||||||
|                 ) |                 ) | ||||||
|                 await asyncio.sleep(30) |                 await asyncio.sleep(30) | ||||||
|                 await self.sys_updater.reload() |                 await self.sys_updater.reload() | ||||||
| @@ -138,7 +145,7 @@ class HomeAssistantCore(JobGroup): | |||||||
|             except Exception as err:  # pylint: disable=broad-except |             except Exception as err:  # pylint: disable=broad-except | ||||||
|                 capture_exception(err) |                 capture_exception(err) | ||||||
|  |  | ||||||
|             _LOGGER.warning("Fails install landingpage, retry after 30sec") |             _LOGGER.warning("Failed to install landingpage, retrying after 30sec") | ||||||
|             await asyncio.sleep(30) |             await asyncio.sleep(30) | ||||||
|  |  | ||||||
|         self.sys_homeassistant.version = LANDINGPAGE |         self.sys_homeassistant.version = LANDINGPAGE | ||||||
| @@ -170,7 +177,7 @@ class HomeAssistantCore(JobGroup): | |||||||
|                 except Exception as err:  # pylint: disable=broad-except |                 except Exception as err:  # pylint: disable=broad-except | ||||||
|                     capture_exception(err) |                     capture_exception(err) | ||||||
|  |  | ||||||
|             _LOGGER.warning("Error on Home Assistant installation. Retry in 30sec") |             _LOGGER.warning("Error on Home Assistant installation. Retrying in 30sec") | ||||||
|             await asyncio.sleep(30) |             await asyncio.sleep(30) | ||||||
|  |  | ||||||
|         _LOGGER.info("Home Assistant docker now installed") |         _LOGGER.info("Home Assistant docker now installed") | ||||||
| @@ -402,7 +409,7 @@ class HomeAssistantCore(JobGroup): | |||||||
|     @property |     @property | ||||||
|     def in_progress(self) -> bool: |     def in_progress(self) -> bool: | ||||||
|         """Return True if a task is in progress.""" |         """Return True if a task is in progress.""" | ||||||
|         return self.instance.in_progress or self.lock.locked() |         return self.instance.in_progress or self.active_job | ||||||
|  |  | ||||||
|     async def check_config(self) -> ConfigResult: |     async def check_config(self) -> ConfigResult: | ||||||
|         """Run Home Assistant config check.""" |         """Run Home Assistant config check.""" | ||||||
| @@ -436,21 +443,38 @@ class HomeAssistantCore(JobGroup): | |||||||
|             return |             return | ||||||
|         _LOGGER.info("Wait until Home Assistant is ready") |         _LOGGER.info("Wait until Home Assistant is ready") | ||||||
|  |  | ||||||
|         while True: |         deadline = datetime.now() + STARTUP_API_RESPONSE_TIMEOUT | ||||||
|             await asyncio.sleep(5) |         last_state = None | ||||||
|  |         while not (timeout := datetime.now() >= deadline): | ||||||
|  |             await asyncio.sleep(SECONDS_BETWEEN_API_CHECKS) | ||||||
|  |  | ||||||
|             # 1: Check if Container is is_running |             # 1: Check if Container is is_running | ||||||
|             if not await self.instance.is_running(): |             if not await self.instance.is_running(): | ||||||
|                 _LOGGER.error("Home Assistant has crashed!") |                 _LOGGER.error("Home Assistant has crashed!") | ||||||
|                 break |                 break | ||||||
|  |  | ||||||
|             # 2: Check if API response |             # 2: Check API response | ||||||
|             if await self.sys_homeassistant.api.check_api_state(): |             if state := await self.sys_homeassistant.api.get_api_state(): | ||||||
|  |                 if last_state is None: | ||||||
|  |                     # API initially available, move deadline up and check API | ||||||
|  |                     # state to be running now | ||||||
|  |                     deadline = datetime.now() + STARTUP_API_CHECK_RUNNING_TIMEOUT | ||||||
|  |  | ||||||
|  |                 if last_state != state: | ||||||
|  |                     _LOGGER.info("Home Assistant Core state changed to %s", state) | ||||||
|  |                     last_state = state | ||||||
|  |  | ||||||
|  |                 if state == "RUNNING": | ||||||
|                     _LOGGER.info("Detect a running Home Assistant instance") |                     _LOGGER.info("Detect a running Home Assistant instance") | ||||||
|                     self._error_state = False |                     self._error_state = False | ||||||
|                     return |                     return | ||||||
|  |  | ||||||
|         self._error_state = True |         self._error_state = True | ||||||
|  |         if timeout: | ||||||
|  |             raise HomeAssistantStartupTimeout( | ||||||
|  |                 "No Home Assistant Core response, assuming a fatal startup error", | ||||||
|  |                 _LOGGER.error, | ||||||
|  |             ) | ||||||
|         raise HomeAssistantCrashError() |         raise HomeAssistantCrashError() | ||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| """Home Assistant control object.""" | """Home Assistant control object.""" | ||||||
| import asyncio | import asyncio | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
|  | import errno | ||||||
| from ipaddress import IPv4Address | from ipaddress import IPv4Address | ||||||
| import logging | import logging | ||||||
| from pathlib import Path, PurePath | from pathlib import Path, PurePath | ||||||
| @@ -18,6 +19,7 @@ from ..const import ( | |||||||
|     ATTR_ACCESS_TOKEN, |     ATTR_ACCESS_TOKEN, | ||||||
|     ATTR_AUDIO_INPUT, |     ATTR_AUDIO_INPUT, | ||||||
|     ATTR_AUDIO_OUTPUT, |     ATTR_AUDIO_OUTPUT, | ||||||
|  |     ATTR_BACKUPS_EXCLUDE_DATABASE, | ||||||
|     ATTR_BOOT, |     ATTR_BOOT, | ||||||
|     ATTR_IMAGE, |     ATTR_IMAGE, | ||||||
|     ATTR_PORT, |     ATTR_PORT, | ||||||
| @@ -41,6 +43,7 @@ from ..exceptions import ( | |||||||
| from ..hardware.const import PolicyGroup | from ..hardware.const import PolicyGroup | ||||||
| from ..hardware.data import Device | from ..hardware.data import Device | ||||||
| from ..jobs.decorator import Job, JobExecutionLimit | from ..jobs.decorator import Job, JobExecutionLimit | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
| from ..utils import remove_folder | from ..utils import remove_folder | ||||||
| from ..utils.common import FileConfiguration | from ..utils.common import FileConfiguration | ||||||
| from ..utils.json import read_json_file, write_json_file | from ..utils.json import read_json_file, write_json_file | ||||||
| @@ -62,6 +65,10 @@ HOMEASSISTANT_BACKUP_EXCLUDE = [ | |||||||
|     "*.log.*", |     "*.log.*", | ||||||
|     "OZW_Log.txt", |     "OZW_Log.txt", | ||||||
| ] | ] | ||||||
|  | HOMEASSISTANT_BACKUP_EXCLUDE_DATABASE = [ | ||||||
|  |     "home-assistant_v?.db", | ||||||
|  |     "home-assistant_v?.db-wal", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  |  | ||||||
| class HomeAssistant(FileConfiguration, CoreSysAttributes): | class HomeAssistant(FileConfiguration, CoreSysAttributes): | ||||||
| @@ -258,6 +265,16 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|         except (AwesomeVersionException, TypeError): |         except (AwesomeVersionException, TypeError): | ||||||
|             return False |             return False | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def backups_exclude_database(self) -> bool: | ||||||
|  |         """Exclude database from core backups by default.""" | ||||||
|  |         return self._data[ATTR_BACKUPS_EXCLUDE_DATABASE] | ||||||
|  |  | ||||||
|  |     @backups_exclude_database.setter | ||||||
|  |     def backups_exclude_database(self, value: bool) -> None: | ||||||
|  |         """Set whether backups should exclude database by default.""" | ||||||
|  |         self._data[ATTR_BACKUPS_EXCLUDE_DATABASE] = value | ||||||
|  |  | ||||||
|     async def load(self) -> None: |     async def load(self) -> None: | ||||||
|         """Prepare Home Assistant object.""" |         """Prepare Home Assistant object.""" | ||||||
|         await asyncio.wait( |         await asyncio.wait( | ||||||
| @@ -285,6 +302,8 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|         try: |         try: | ||||||
|             self.path_pulse.write_text(pulse_config, encoding="utf-8") |             self.path_pulse.write_text(pulse_config, encoding="utf-8") | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error("Home Assistant can't write pulse/client.config: %s", err) |             _LOGGER.error("Home Assistant can't write pulse/client.config: %s", err) | ||||||
|         else: |         else: | ||||||
|             _LOGGER.info("Update pulse/client.config: %s", self.path_pulse) |             _LOGGER.info("Update pulse/client.config: %s", self.path_pulse) | ||||||
| @@ -327,7 +346,9 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|             ) |             ) | ||||||
|  |  | ||||||
|     @Job(name="home_assistant_module_backup") |     @Job(name="home_assistant_module_backup") | ||||||
|     async def backup(self, tar_file: tarfile.TarFile) -> None: |     async def backup( | ||||||
|  |         self, tar_file: tarfile.TarFile, exclude_database: bool = False | ||||||
|  |     ) -> None: | ||||||
|         """Backup Home Assistant Core config/ directory.""" |         """Backup Home Assistant Core config/ directory.""" | ||||||
|         await self.begin_backup() |         await self.begin_backup() | ||||||
|         try: |         try: | ||||||
| @@ -351,11 +372,16 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|                         # Backup metadata |                         # Backup metadata | ||||||
|                         backup.add(temp, arcname=".") |                         backup.add(temp, arcname=".") | ||||||
|  |  | ||||||
|  |                         # Set excludes | ||||||
|  |                         excludes = HOMEASSISTANT_BACKUP_EXCLUDE.copy() | ||||||
|  |                         if exclude_database: | ||||||
|  |                             excludes += HOMEASSISTANT_BACKUP_EXCLUDE_DATABASE | ||||||
|  |  | ||||||
|                         # Backup data |                         # Backup data | ||||||
|                         atomic_contents_add( |                         atomic_contents_add( | ||||||
|                             backup, |                             backup, | ||||||
|                             self.sys_config.path_homeassistant, |                             self.sys_config.path_homeassistant, | ||||||
|                             excludes=HOMEASSISTANT_BACKUP_EXCLUDE, |                             excludes=excludes, | ||||||
|                             arcname="data", |                             arcname="data", | ||||||
|                         ) |                         ) | ||||||
|  |  | ||||||
| @@ -371,7 +397,10 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|         finally: |         finally: | ||||||
|             await self.end_backup() |             await self.end_backup() | ||||||
|  |  | ||||||
|     async def restore(self, tar_file: tarfile.TarFile) -> None: |     @Job(name="home_assistant_module_restore") | ||||||
|  |     async def restore( | ||||||
|  |         self, tar_file: tarfile.TarFile, exclude_database: bool = False | ||||||
|  |     ) -> None: | ||||||
|         """Restore Home Assistant Core config/ directory.""" |         """Restore Home Assistant Core config/ directory.""" | ||||||
|         with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp: |         with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp: | ||||||
|             temp_path = Path(temp) |             temp_path = Path(temp) | ||||||
| @@ -382,7 +411,11 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|             def _extract_tarfile(): |             def _extract_tarfile(): | ||||||
|                 """Extract tar backup.""" |                 """Extract tar backup.""" | ||||||
|                 with tar_file as backup: |                 with tar_file as backup: | ||||||
|                     backup.extractall(path=temp_path, members=secure_path(backup)) |                     backup.extractall( | ||||||
|  |                         path=temp_path, | ||||||
|  |                         members=secure_path(backup), | ||||||
|  |                         filter="fully_trusted", | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 await self.sys_run_in_executor(_extract_tarfile) |                 await self.sys_run_in_executor(_extract_tarfile) | ||||||
| @@ -399,11 +432,22 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|             def _restore_data(): |             def _restore_data(): | ||||||
|                 """Restore data.""" |                 """Restore data.""" | ||||||
|                 shutil.copytree( |                 shutil.copytree( | ||||||
|                     temp_data, self.sys_config.path_homeassistant, symlinks=True |                     temp_data, | ||||||
|  |                     self.sys_config.path_homeassistant, | ||||||
|  |                     symlinks=True, | ||||||
|  |                     dirs_exist_ok=True, | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|             _LOGGER.info("Restore Home Assistant Core config folder") |             _LOGGER.info("Restore Home Assistant Core config folder") | ||||||
|             await remove_folder(self.sys_config.path_homeassistant) |             excludes = ( | ||||||
|  |                 HOMEASSISTANT_BACKUP_EXCLUDE_DATABASE if exclude_database else None | ||||||
|  |             ) | ||||||
|  |             await remove_folder( | ||||||
|  |                 self.sys_config.path_homeassistant, | ||||||
|  |                 content_only=True, | ||||||
|  |                 excludes=excludes, | ||||||
|  |                 tmp_dir=self.sys_config.path_tmp, | ||||||
|  |             ) | ||||||
|             try: |             try: | ||||||
|                 await self.sys_run_in_executor(_restore_data) |                 await self.sys_run_in_executor(_restore_data) | ||||||
|             except shutil.Error as err: |             except shutil.Error as err: | ||||||
| @@ -441,6 +485,7 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|                 ATTR_REFRESH_TOKEN, |                 ATTR_REFRESH_TOKEN, | ||||||
|                 ATTR_WATCHDOG, |                 ATTR_WATCHDOG, | ||||||
|             ): |             ): | ||||||
|  |                 if attr in data: | ||||||
|                     self._data[attr] = data[attr] |                     self._data[attr] = data[attr] | ||||||
|  |  | ||||||
|     @Job( |     @Job( | ||||||
| @@ -455,6 +500,7 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|             {ATTR_TYPE: "config/auth/list"} |             {ATTR_TYPE: "config/auth/list"} | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |         if list_of_users: | ||||||
|             return [ |             return [ | ||||||
|                 IngressSessionDataUser( |                 IngressSessionDataUser( | ||||||
|                     id=data["id"], |                     id=data["id"], | ||||||
| @@ -463,3 +509,4 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes): | |||||||
|                 ) |                 ) | ||||||
|                 for data in list_of_users |                 for data in list_of_users | ||||||
|             ] |             ] | ||||||
|  |         return [] | ||||||
|   | |||||||
| @@ -7,6 +7,7 @@ from ..const import ( | |||||||
|     ATTR_ACCESS_TOKEN, |     ATTR_ACCESS_TOKEN, | ||||||
|     ATTR_AUDIO_INPUT, |     ATTR_AUDIO_INPUT, | ||||||
|     ATTR_AUDIO_OUTPUT, |     ATTR_AUDIO_OUTPUT, | ||||||
|  |     ATTR_BACKUPS_EXCLUDE_DATABASE, | ||||||
|     ATTR_BOOT, |     ATTR_BOOT, | ||||||
|     ATTR_IMAGE, |     ATTR_IMAGE, | ||||||
|     ATTR_PORT, |     ATTR_PORT, | ||||||
| @@ -32,6 +33,7 @@ SCHEMA_HASS_CONFIG = vol.Schema( | |||||||
|         vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(), |         vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(), | ||||||
|         vol.Optional(ATTR_AUDIO_OUTPUT, default=None): vol.Maybe(str), |         vol.Optional(ATTR_AUDIO_OUTPUT, default=None): vol.Maybe(str), | ||||||
|         vol.Optional(ATTR_AUDIO_INPUT, default=None): vol.Maybe(str), |         vol.Optional(ATTR_AUDIO_INPUT, default=None): vol.Maybe(str), | ||||||
|  |         vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE, default=False): vol.Boolean(), | ||||||
|     }, |     }, | ||||||
|     extra=vol.REMOVE_EXTRA, |     extra=vol.REMOVE_EXTRA, | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -154,7 +154,7 @@ class WSClient: | |||||||
|     @classmethod |     @classmethod | ||||||
|     async def connect_with_auth( |     async def connect_with_auth( | ||||||
|         cls, session: aiohttp.ClientSession, loop, url: str, token: str |         cls, session: aiohttp.ClientSession, loop, url: str, token: str | ||||||
|     ) -> "WSClient": |     ) -> WSClient: | ||||||
|         """Create an authenticated websocket client.""" |         """Create an authenticated websocket client.""" | ||||||
|         try: |         try: | ||||||
|             client = await session.ws_connect(url, ssl=False) |             client = await session.ws_connect(url, ssl=False) | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| """AppArmor control for host.""" | """AppArmor control for host.""" | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
|  | import errno | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| import shutil | import shutil | ||||||
| @@ -9,7 +10,7 @@ from awesomeversion import AwesomeVersion | |||||||
|  |  | ||||||
| from ..coresys import CoreSys, CoreSysAttributes | from ..coresys import CoreSys, CoreSysAttributes | ||||||
| from ..exceptions import DBusError, HostAppArmorError | from ..exceptions import DBusError, HostAppArmorError | ||||||
| from ..resolution.const import UnsupportedReason | from ..resolution.const import UnhealthyReason, UnsupportedReason | ||||||
| from ..utils.apparmor import validate_profile | from ..utils.apparmor import validate_profile | ||||||
| from .const import HostFeature | from .const import HostFeature | ||||||
|  |  | ||||||
| @@ -80,6 +81,8 @@ class AppArmorControl(CoreSysAttributes): | |||||||
|         try: |         try: | ||||||
|             await self.sys_run_in_executor(shutil.copyfile, profile_file, dest_profile) |             await self.sys_run_in_executor(shutil.copyfile, profile_file, dest_profile) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             raise HostAppArmorError( |             raise HostAppArmorError( | ||||||
|                 f"Can't copy {profile_file}: {err}", _LOGGER.error |                 f"Can't copy {profile_file}: {err}", _LOGGER.error | ||||||
|             ) from err |             ) from err | ||||||
| @@ -103,6 +106,8 @@ class AppArmorControl(CoreSysAttributes): | |||||||
|         try: |         try: | ||||||
|             await self.sys_run_in_executor(profile_file.unlink) |             await self.sys_run_in_executor(profile_file.unlink) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             raise HostAppArmorError( |             raise HostAppArmorError( | ||||||
|                 f"Can't remove profile: {err}", _LOGGER.error |                 f"Can't remove profile: {err}", _LOGGER.error | ||||||
|             ) from err |             ) from err | ||||||
| @@ -117,6 +122,8 @@ class AppArmorControl(CoreSysAttributes): | |||||||
|         try: |         try: | ||||||
|             await self.sys_run_in_executor(shutil.copy, profile_file, backup_file) |             await self.sys_run_in_executor(shutil.copy, profile_file, backup_file) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             raise HostAppArmorError( |             raise HostAppArmorError( | ||||||
|                 f"Can't backup profile {profile_name}: {err}", _LOGGER.error |                 f"Can't backup profile {profile_name}: {err}", _LOGGER.error | ||||||
|             ) from err |             ) from err | ||||||
|   | |||||||
| @@ -175,4 +175,4 @@ class HostManager(CoreSysAttributes): | |||||||
|     async def _hardware_events(self, device: Device) -> None: |     async def _hardware_events(self, device: Device) -> None: | ||||||
|         """Process hardware requests.""" |         """Process hardware requests.""" | ||||||
|         if self.sys_hardware.policy.is_match_cgroup(PolicyGroup.AUDIO, device): |         if self.sys_hardware.policy.is_match_cgroup(PolicyGroup.AUDIO, device): | ||||||
|             await self.sound.update() |             await self.sound.update(reload_pulse=True) | ||||||
|   | |||||||
| @@ -189,6 +189,7 @@ class NetworkManager(CoreSysAttributes): | |||||||
|             _LOGGER.debug("Updating existing configuration for %s", interface.name) |             _LOGGER.debug("Updating existing configuration for %s", interface.name) | ||||||
|             settings = get_connection_from_interface( |             settings = get_connection_from_interface( | ||||||
|                 interface, |                 interface, | ||||||
|  |                 self.sys_dbus.network, | ||||||
|                 name=inet.settings.connection.id, |                 name=inet.settings.connection.id, | ||||||
|                 uuid=inet.settings.connection.uuid, |                 uuid=inet.settings.connection.uuid, | ||||||
|             ) |             ) | ||||||
| @@ -217,7 +218,7 @@ class NetworkManager(CoreSysAttributes): | |||||||
|         # Create new configuration and activate interface |         # Create new configuration and activate interface | ||||||
|         elif inet and interface.enabled: |         elif inet and interface.enabled: | ||||||
|             _LOGGER.debug("Create new configuration for %s", interface.name) |             _LOGGER.debug("Create new configuration for %s", interface.name) | ||||||
|             settings = get_connection_from_interface(interface) |             settings = get_connection_from_interface(interface, self.sys_dbus.network) | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 settings, con = await self.sys_dbus.network.add_and_activate_connection( |                 settings, con = await self.sys_dbus.network.add_and_activate_connection( | ||||||
| @@ -244,7 +245,7 @@ class NetworkManager(CoreSysAttributes): | |||||||
|  |  | ||||||
|         # Create new interface (like vlan) |         # Create new interface (like vlan) | ||||||
|         elif not inet: |         elif not inet: | ||||||
|             settings = get_connection_from_interface(interface) |             settings = get_connection_from_interface(interface, self.sys_dbus.network) | ||||||
|  |  | ||||||
|             try: |             try: | ||||||
|                 await self.sys_dbus.network.settings.add_connection(settings) |                 await self.sys_dbus.network.settings.add_connection(settings) | ||||||
|   | |||||||
| @@ -15,6 +15,9 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) | |||||||
|  |  | ||||||
| PULSE_NAME = "supervisor" | PULSE_NAME = "supervisor" | ||||||
|  |  | ||||||
|  | PULSE_ALSA_MODULE = "module-alsa-card" | ||||||
|  | PULSE_UDEV_MODULE = "module-udev-detect" | ||||||
|  |  | ||||||
|  |  | ||||||
| class StreamType(StrEnum): | class StreamType(StrEnum): | ||||||
|     """INPUT/OUTPUT type of source.""" |     """INPUT/OUTPUT type of source.""" | ||||||
| @@ -235,9 +238,9 @@ class SoundControl(CoreSysAttributes): | |||||||
|     @Job( |     @Job( | ||||||
|         name="sound_control_update", |         name="sound_control_update", | ||||||
|         limit=JobExecutionLimit.THROTTLE_WAIT, |         limit=JobExecutionLimit.THROTTLE_WAIT, | ||||||
|         throttle_period=timedelta(seconds=10), |         throttle_period=timedelta(seconds=2), | ||||||
|     ) |     ) | ||||||
|     async def update(self): |     async def update(self, reload_pulse: bool = False): | ||||||
|         """Update properties over dbus.""" |         """Update properties over dbus.""" | ||||||
|         _LOGGER.info("Updating PulseAudio information") |         _LOGGER.info("Updating PulseAudio information") | ||||||
|  |  | ||||||
| @@ -348,11 +351,32 @@ class SoundControl(CoreSysAttributes): | |||||||
|                     f"Error while processing pulse update: {err}", _LOGGER.error |                     f"Error while processing pulse update: {err}", _LOGGER.error | ||||||
|                 ) from err |                 ) from err | ||||||
|             except PulseError as err: |             except PulseError as err: | ||||||
|                 _LOGGER.debug("Can't update PulseAudio data: %s", err) |                 _LOGGER.warning("Can't update PulseAudio data: %s", err) | ||||||
|  |  | ||||||
|             return data |             return data | ||||||
|  |  | ||||||
|  |         def _reload_pulse_modules(): | ||||||
|  |             try: | ||||||
|  |                 with Pulse(PULSE_NAME) as pulse: | ||||||
|  |                     modules = pulse.module_list() | ||||||
|  |                     for alsa_module in filter( | ||||||
|  |                         lambda x: x.name == PULSE_ALSA_MODULE, modules | ||||||
|  |                     ): | ||||||
|  |                         pulse.module_unload(alsa_module.index) | ||||||
|  |                     udev_module = next( | ||||||
|  |                         filter(lambda x: x.name == PULSE_UDEV_MODULE, modules) | ||||||
|  |                     ) | ||||||
|  |                     pulse.module_unload(udev_module.index) | ||||||
|  |                     # And now reload | ||||||
|  |                     pulse.module_load(PULSE_UDEV_MODULE) | ||||||
|  |             except StopIteration: | ||||||
|  |                 _LOGGER.warning("Can't reload PulseAudio modules.") | ||||||
|  |             except PulseError as err: | ||||||
|  |                 _LOGGER.warning("Can't reload PulseAudio modules: %s", err) | ||||||
|  |  | ||||||
|         # Update data from pulse server |         # Update data from pulse server | ||||||
|  |         if reload_pulse: | ||||||
|  |             await self.sys_run_in_executor(_reload_pulse_modules) | ||||||
|         data: PulseData = await self.sys_run_in_executor(_get_pulse_data) |         data: PulseData = await self.sys_run_in_executor(_get_pulse_data) | ||||||
|         self._applications = data.applications |         self._applications = data.applications | ||||||
|         self._cards = data.cards |         self._cards = data.cards | ||||||
|   | |||||||
| @@ -38,7 +38,9 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|  |  | ||||||
|     def get_session_data(self, session_id: str) -> IngressSessionData | None: |     def get_session_data(self, session_id: str) -> IngressSessionData | None: | ||||||
|         """Return complementary data of current session or None.""" |         """Return complementary data of current session or None.""" | ||||||
|         return self.sessions_data.get(session_id) |         if data := self.sessions_data.get(session_id): | ||||||
|  |             return IngressSessionData.from_dict(data) | ||||||
|  |         return None | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def sessions(self) -> dict[str, float]: |     def sessions(self) -> dict[str, float]: | ||||||
| @@ -46,7 +48,7 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|         return self._data[ATTR_SESSION] |         return self._data[ATTR_SESSION] | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def sessions_data(self) -> dict[str, IngressSessionData]: |     def sessions_data(self) -> dict[str, dict[str, str | None]]: | ||||||
|         """Return sessions_data.""" |         """Return sessions_data.""" | ||||||
|         return self._data[ATTR_SESSION_DATA] |         return self._data[ATTR_SESSION_DATA] | ||||||
|  |  | ||||||
| @@ -86,7 +88,7 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|         now = utcnow() |         now = utcnow() | ||||||
|  |  | ||||||
|         sessions = {} |         sessions = {} | ||||||
|         sessions_data: dict[str, IngressSessionData] = {} |         sessions_data: dict[str, dict[str, str | None]] = {} | ||||||
|         for session, valid in self.sessions.items(): |         for session, valid in self.sessions.items(): | ||||||
|             # check if timestamp valid, to avoid crash on malformed timestamp |             # check if timestamp valid, to avoid crash on malformed timestamp | ||||||
|             try: |             try: | ||||||
| @@ -100,7 +102,8 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|  |  | ||||||
|             # Is valid |             # Is valid | ||||||
|             sessions[session] = valid |             sessions[session] = valid | ||||||
|             sessions_data[session] = self.get_session_data(session) |             if session_data := self.sessions_data.get(session): | ||||||
|  |                 sessions_data[session] = session_data | ||||||
|  |  | ||||||
|         # Write back |         # Write back | ||||||
|         self.sessions.clear() |         self.sessions.clear() | ||||||
| @@ -123,7 +126,7 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|  |  | ||||||
|         self.sessions[session] = valid.timestamp() |         self.sessions[session] = valid.timestamp() | ||||||
|         if data is not None: |         if data is not None: | ||||||
|             self.sessions_data[session] = data |             self.sessions_data[session] = data.to_dict() | ||||||
|  |  | ||||||
|         return session |         return session | ||||||
|  |  | ||||||
| @@ -151,7 +154,7 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|  |  | ||||||
|         return True |         return True | ||||||
|  |  | ||||||
|     def get_dynamic_port(self, addon_slug: str) -> int: |     async def get_dynamic_port(self, addon_slug: str) -> int: | ||||||
|         """Get/Create a dynamic port from range.""" |         """Get/Create a dynamic port from range.""" | ||||||
|         if addon_slug in self.ports: |         if addon_slug in self.ports: | ||||||
|             return self.ports[addon_slug] |             return self.ports[addon_slug] | ||||||
| @@ -160,7 +163,7 @@ class Ingress(FileConfiguration, CoreSysAttributes): | |||||||
|         while ( |         while ( | ||||||
|             port is None |             port is None | ||||||
|             or port in self.ports.values() |             or port in self.ports.values() | ||||||
|             or check_port(self.sys_docker.network.gateway, port) |             or await check_port(self.sys_docker.network.gateway, port) | ||||||
|         ): |         ): | ||||||
|             port = random.randint(62000, 65500) |             port = random.randint(62000, 65500) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -86,7 +86,7 @@ class SupervisorJob: | |||||||
|         } |         } | ||||||
|  |  | ||||||
|     @contextmanager |     @contextmanager | ||||||
|     def start(self, *, on_done: Callable[["SupervisorJob"], None] | None = None): |     def start(self): | ||||||
|         """Start the job in the current task. |         """Start the job in the current task. | ||||||
|  |  | ||||||
|         This can only be called if the parent ID matches the job running in the current task. |         This can only be called if the parent ID matches the job running in the current task. | ||||||
| @@ -107,8 +107,6 @@ class SupervisorJob: | |||||||
|             self.done = True |             self.done = True | ||||||
|             if token: |             if token: | ||||||
|                 _CURRENT_JOB.reset(token) |                 _CURRENT_JOB.reset(token) | ||||||
|             if on_done: |  | ||||||
|                 on_done(self) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class JobManager(FileConfiguration, CoreSysAttributes): | class JobManager(FileConfiguration, CoreSysAttributes): | ||||||
| @@ -192,7 +190,7 @@ class JobManager(FileConfiguration, CoreSysAttributes): | |||||||
|         if job.uuid not in self._jobs: |         if job.uuid not in self._jobs: | ||||||
|             raise JobNotFound(f"Could not find job {job.name}", _LOGGER.error) |             raise JobNotFound(f"Could not find job {job.name}", _LOGGER.error) | ||||||
|  |  | ||||||
|         if not job.done: |         if job.done is False: | ||||||
|             _LOGGER.warning("Removing incomplete job %s from job manager", job.name) |             _LOGGER.warning("Removing incomplete job %s from job manager", job.name) | ||||||
|  |  | ||||||
|         del self._jobs[job.uuid] |         del self._jobs[job.uuid] | ||||||
|   | |||||||
| @@ -174,6 +174,14 @@ class Job(CoreSysAttributes): | |||||||
|             return obj |             return obj | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|  |     def _handle_job_condition_exception(self, err: JobConditionException) -> None: | ||||||
|  |         """Handle a job condition failure.""" | ||||||
|  |         error_msg = str(err) | ||||||
|  |         if self.on_condition is None: | ||||||
|  |             _LOGGER.info(error_msg) | ||||||
|  |             return | ||||||
|  |         raise self.on_condition(error_msg, _LOGGER.warning) from None | ||||||
|  |  | ||||||
|     def __call__(self, method): |     def __call__(self, method): | ||||||
|         """Call the wrapper logic.""" |         """Call the wrapper logic.""" | ||||||
|         self._method = method |         self._method = method | ||||||
| @@ -193,26 +201,30 @@ class Job(CoreSysAttributes): | |||||||
|                 internal=self._internal, |                 internal=self._internal, | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|  |             try: | ||||||
|                 # Handle condition |                 # Handle condition | ||||||
|                 if self.conditions: |                 if self.conditions: | ||||||
|                     try: |                     try: | ||||||
|                     await self._check_conditions() |                         await Job.check_conditions( | ||||||
|  |                             self, set(self.conditions), self._method.__qualname__ | ||||||
|  |                         ) | ||||||
|                     except JobConditionException as err: |                     except JobConditionException as err: | ||||||
|                     error_msg = str(err) |                         return self._handle_job_condition_exception(err) | ||||||
|                     if self.on_condition is None: |  | ||||||
|                         _LOGGER.info(error_msg) |  | ||||||
|                         return |  | ||||||
|                     raise self.on_condition(error_msg, _LOGGER.warning) from None |  | ||||||
|  |  | ||||||
|                 # Handle exection limits |                 # Handle exection limits | ||||||
|             if self.limit in (JobExecutionLimit.SINGLE_WAIT, JobExecutionLimit.ONCE): |                 if self.limit in ( | ||||||
|  |                     JobExecutionLimit.SINGLE_WAIT, | ||||||
|  |                     JobExecutionLimit.ONCE, | ||||||
|  |                 ): | ||||||
|                     await self._acquire_exection_limit() |                     await self._acquire_exection_limit() | ||||||
|                 elif self.limit in ( |                 elif self.limit in ( | ||||||
|                     JobExecutionLimit.GROUP_ONCE, |                     JobExecutionLimit.GROUP_ONCE, | ||||||
|                     JobExecutionLimit.GROUP_WAIT, |                     JobExecutionLimit.GROUP_WAIT, | ||||||
|                 ): |                 ): | ||||||
|                     try: |                     try: | ||||||
|                     await obj.acquire(job, self.limit == JobExecutionLimit.GROUP_WAIT) |                         await obj.acquire( | ||||||
|  |                             job, self.limit == JobExecutionLimit.GROUP_WAIT | ||||||
|  |                         ) | ||||||
|                     except JobGroupExecutionLimitExceeded as err: |                     except JobGroupExecutionLimitExceeded as err: | ||||||
|                         if self.on_condition: |                         if self.on_condition: | ||||||
|                             raise self.on_condition(str(err)) from err |                             raise self.on_condition(str(err)) from err | ||||||
| @@ -238,26 +250,35 @@ class Job(CoreSysAttributes): | |||||||
|                     JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT, |                     JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT, | ||||||
|                 ): |                 ): | ||||||
|                     # Only reprocess array when necessary (at limit) |                     # Only reprocess array when necessary (at limit) | ||||||
|                 if len(self.rate_limited_calls(group_name)) >= self.throttle_max_calls: |                     if ( | ||||||
|  |                         len(self.rate_limited_calls(group_name)) | ||||||
|  |                         >= self.throttle_max_calls | ||||||
|  |                     ): | ||||||
|                         self.set_rate_limited_calls( |                         self.set_rate_limited_calls( | ||||||
|                             [ |                             [ | ||||||
|                                 call |                                 call | ||||||
|                                 for call in self.rate_limited_calls(group_name) |                                 for call in self.rate_limited_calls(group_name) | ||||||
|                             if call > datetime.now() - self.throttle_period(group_name) |                                 if call | ||||||
|  |                                 > datetime.now() - self.throttle_period(group_name) | ||||||
|                             ], |                             ], | ||||||
|                             group_name, |                             group_name, | ||||||
|                         ) |                         ) | ||||||
|  |  | ||||||
|                 if len(self.rate_limited_calls(group_name)) >= self.throttle_max_calls: |                     if ( | ||||||
|  |                         len(self.rate_limited_calls(group_name)) | ||||||
|  |                         >= self.throttle_max_calls | ||||||
|  |                     ): | ||||||
|                         on_condition = ( |                         on_condition = ( | ||||||
|                         JobException if self.on_condition is None else self.on_condition |                             JobException | ||||||
|  |                             if self.on_condition is None | ||||||
|  |                             else self.on_condition | ||||||
|                         ) |                         ) | ||||||
|                         raise on_condition( |                         raise on_condition( | ||||||
|                         f"Rate limit exceeded, more then {self.throttle_max_calls} calls in {self.throttle_period(group_name)}", |                             f"Rate limit exceeded, more than {self.throttle_max_calls} calls in {self.throttle_period(group_name)}", | ||||||
|                         ) |                         ) | ||||||
|  |  | ||||||
|                 # Execute Job |                 # Execute Job | ||||||
|             with job.start(on_done=self.sys_jobs.remove_job if self.cleanup else None): |                 with job.start(): | ||||||
|                     try: |                     try: | ||||||
|                         self.set_last_call(datetime.now(), group_name) |                         self.set_last_call(datetime.now(), group_name) | ||||||
|                         if self.rate_limited_calls(group_name) is not None: |                         if self.rate_limited_calls(group_name) is not None: | ||||||
| @@ -266,6 +287,11 @@ class Job(CoreSysAttributes): | |||||||
|                             ) |                             ) | ||||||
|  |  | ||||||
|                         return await self._method(obj, *args, **kwargs) |                         return await self._method(obj, *args, **kwargs) | ||||||
|  |  | ||||||
|  |                     # If a method has a conditional JobCondition, they must check it in the method | ||||||
|  |                     # These should be handled like normal JobConditions as much as possible | ||||||
|  |                     except JobConditionException as err: | ||||||
|  |                         return self._handle_job_condition_exception(err) | ||||||
|                     except HassioError as err: |                     except HassioError as err: | ||||||
|                         raise err |                         raise err | ||||||
|                     except Exception as err: |                     except Exception as err: | ||||||
| @@ -280,12 +306,20 @@ class Job(CoreSysAttributes): | |||||||
|                         ): |                         ): | ||||||
|                             obj.release() |                             obj.release() | ||||||
|  |  | ||||||
|  |             # Jobs that weren't started are always cleaned up. Also clean up done jobs if required | ||||||
|  |             finally: | ||||||
|  |                 if job.done is None or self.cleanup: | ||||||
|  |                     self.sys_jobs.remove_job(job) | ||||||
|  |  | ||||||
|         return wrapper |         return wrapper | ||||||
|  |  | ||||||
|     async def _check_conditions(self): |     @staticmethod | ||||||
|  |     async def check_conditions( | ||||||
|  |         coresys: CoreSysAttributes, conditions: set[JobCondition], method_name: str | ||||||
|  |     ): | ||||||
|         """Check conditions.""" |         """Check conditions.""" | ||||||
|         used_conditions = set(self.conditions) - set(self.sys_jobs.ignore_conditions) |         used_conditions = set(conditions) - set(coresys.sys_jobs.ignore_conditions) | ||||||
|         ignored_conditions = set(self.conditions) & set(self.sys_jobs.ignore_conditions) |         ignored_conditions = set(conditions) & set(coresys.sys_jobs.ignore_conditions) | ||||||
|  |  | ||||||
|         # Check if somethings is ignored |         # Check if somethings is ignored | ||||||
|         if ignored_conditions: |         if ignored_conditions: | ||||||
| @@ -294,93 +328,97 @@ class Job(CoreSysAttributes): | |||||||
|                 ignored_conditions, |                 ignored_conditions, | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if JobCondition.HEALTHY in used_conditions and not self.sys_core.healthy: |         if JobCondition.HEALTHY in used_conditions and not coresys.sys_core.healthy: | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, system is not healthy - {', '.join(self.sys_resolution.unhealthy)}" |                 f"'{method_name}' blocked from execution, system is not healthy - {', '.join(coresys.sys_resolution.unhealthy)}" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.RUNNING in used_conditions |             JobCondition.RUNNING in used_conditions | ||||||
|             and self.sys_core.state != CoreState.RUNNING |             and coresys.sys_core.state != CoreState.RUNNING | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, system is not running - {self.sys_core.state!s}" |                 f"'{method_name}' blocked from execution, system is not running - {coresys.sys_core.state!s}" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.FROZEN in used_conditions |             JobCondition.FROZEN in used_conditions | ||||||
|             and self.sys_core.state != CoreState.FREEZE |             and coresys.sys_core.state != CoreState.FREEZE | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, system is not frozen - {self.sys_core.state!s}" |                 f"'{method_name}' blocked from execution, system is not frozen - {coresys.sys_core.state!s}" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.FREE_SPACE in used_conditions |             JobCondition.FREE_SPACE in used_conditions | ||||||
|             and self.sys_host.info.free_space < MINIMUM_FREE_SPACE_THRESHOLD |             and coresys.sys_host.info.free_space < MINIMUM_FREE_SPACE_THRESHOLD | ||||||
|         ): |         ): | ||||||
|             self.sys_resolution.create_issue(IssueType.FREE_SPACE, ContextType.SYSTEM) |             coresys.sys_resolution.create_issue( | ||||||
|  |                 IssueType.FREE_SPACE, ContextType.SYSTEM | ||||||
|  |             ) | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, not enough free space ({self.sys_host.info.free_space}GB) left on the device" |                 f"'{method_name}' blocked from execution, not enough free space ({coresys.sys_host.info.free_space}GB) left on the device" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if JobCondition.INTERNET_SYSTEM in used_conditions: |         if JobCondition.INTERNET_SYSTEM in used_conditions: | ||||||
|             await self.sys_supervisor.check_connectivity() |             await coresys.sys_supervisor.check_connectivity() | ||||||
|             if not self.sys_supervisor.connectivity: |             if not coresys.sys_supervisor.connectivity: | ||||||
|                 raise JobConditionException( |                 raise JobConditionException( | ||||||
|                     f"'{self._method.__qualname__}' blocked from execution, no supervisor internet connection" |                     f"'{method_name}' blocked from execution, no supervisor internet connection" | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|         if JobCondition.INTERNET_HOST in used_conditions: |         if JobCondition.INTERNET_HOST in used_conditions: | ||||||
|             await self.sys_host.network.check_connectivity() |             await coresys.sys_host.network.check_connectivity() | ||||||
|             if ( |             if ( | ||||||
|                 self.sys_host.network.connectivity is not None |                 coresys.sys_host.network.connectivity is not None | ||||||
|                 and not self.sys_host.network.connectivity |                 and not coresys.sys_host.network.connectivity | ||||||
|             ): |             ): | ||||||
|                 raise JobConditionException( |                 raise JobConditionException( | ||||||
|                     f"'{self._method.__qualname__}' blocked from execution, no host internet connection" |                     f"'{method_name}' blocked from execution, no host internet connection" | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|         if JobCondition.HAOS in used_conditions and not self.sys_os.available: |         if JobCondition.HAOS in used_conditions and not coresys.sys_os.available: | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, no Home Assistant OS available" |                 f"'{method_name}' blocked from execution, no Home Assistant OS available" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.OS_AGENT in used_conditions |             JobCondition.OS_AGENT in used_conditions | ||||||
|             and HostFeature.OS_AGENT not in self.sys_host.features |             and HostFeature.OS_AGENT not in coresys.sys_host.features | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, no Home Assistant OS-Agent available" |                 f"'{method_name}' blocked from execution, no Home Assistant OS-Agent available" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.HOST_NETWORK in used_conditions |             JobCondition.HOST_NETWORK in used_conditions | ||||||
|             and not self.sys_dbus.network.is_connected |             and not coresys.sys_dbus.network.is_connected | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, host Network Manager not available" |                 f"'{method_name}' blocked from execution, host Network Manager not available" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.AUTO_UPDATE in used_conditions |             JobCondition.AUTO_UPDATE in used_conditions | ||||||
|             and not self.sys_updater.auto_update |             and not coresys.sys_updater.auto_update | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, supervisor auto updates disabled" |                 f"'{method_name}' blocked from execution, supervisor auto updates disabled" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.SUPERVISOR_UPDATED in used_conditions |             JobCondition.SUPERVISOR_UPDATED in used_conditions | ||||||
|             and self.sys_supervisor.need_update |             and coresys.sys_supervisor.need_update | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, supervisor needs to be updated first" |                 f"'{method_name}' blocked from execution, supervisor needs to be updated first" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if JobCondition.PLUGINS_UPDATED in used_conditions and ( |         if JobCondition.PLUGINS_UPDATED in used_conditions and ( | ||||||
|             out_of_date := [ |             out_of_date := [ | ||||||
|                 plugin for plugin in self.sys_plugins.all_plugins if plugin.need_update |                 plugin | ||||||
|  |                 for plugin in coresys.sys_plugins.all_plugins | ||||||
|  |                 if plugin.need_update | ||||||
|             ] |             ] | ||||||
|         ): |         ): | ||||||
|             errors = await asyncio.gather( |             errors = await asyncio.gather( | ||||||
| @@ -391,15 +429,15 @@ class Job(CoreSysAttributes): | |||||||
|                 out_of_date[i].slug for i in range(len(errors)) if errors[i] is not None |                 out_of_date[i].slug for i in range(len(errors)) if errors[i] is not None | ||||||
|             ]: |             ]: | ||||||
|                 raise JobConditionException( |                 raise JobConditionException( | ||||||
|                     f"'{self._method.__qualname__}' blocked from execution, was unable to update plugin(s) {', '.join(update_failures)} and all plugins must be up to date first" |                     f"'{method_name}' blocked from execution, was unable to update plugin(s) {', '.join(update_failures)} and all plugins must be up to date first" | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             JobCondition.MOUNT_AVAILABLE in used_conditions |             JobCondition.MOUNT_AVAILABLE in used_conditions | ||||||
|             and HostFeature.MOUNT not in self.sys_host.features |             and HostFeature.MOUNT not in coresys.sys_host.features | ||||||
|         ): |         ): | ||||||
|             raise JobConditionException( |             raise JobConditionException( | ||||||
|                 f"'{self._method.__qualname__}' blocked from execution, mounting not supported on system" |                 f"'{method_name}' blocked from execution, mounting not supported on system" | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|     async def _acquire_exection_limit(self) -> None: |     async def _acquire_exection_limit(self) -> None: | ||||||
|   | |||||||
| @@ -115,5 +115,5 @@ class Scheduler(CoreSysAttributes): | |||||||
|         try: |         try: | ||||||
|             async with async_timeout.timeout(timeout): |             async with async_timeout.timeout(timeout): | ||||||
|                 await asyncio.wait(running) |                 await asyncio.wait(running) | ||||||
|         except asyncio.TimeoutError: |         except TimeoutError: | ||||||
|             _LOGGER.error("Timeout while waiting for jobs shutdown") |             _LOGGER.error("Timeout while waiting for jobs shutdown") | ||||||
|   | |||||||
| @@ -7,6 +7,7 @@ from ..addons.const import ADDON_UPDATE_CONDITIONS | |||||||
| from ..const import AddonState | from ..const import AddonState | ||||||
| from ..coresys import CoreSysAttributes | from ..coresys import CoreSysAttributes | ||||||
| from ..exceptions import AddonsError, HomeAssistantError, ObserverError | from ..exceptions import AddonsError, HomeAssistantError, ObserverError | ||||||
|  | from ..homeassistant.const import LANDINGPAGE | ||||||
| from ..jobs.decorator import Job, JobCondition | from ..jobs.decorator import Job, JobCondition | ||||||
| from ..plugins.const import PLUGIN_UPDATE_CONDITIONS | from ..plugins.const import PLUGIN_UPDATE_CONDITIONS | ||||||
| from ..utils.sentry import capture_exception | from ..utils.sentry import capture_exception | ||||||
| @@ -14,6 +15,8 @@ from ..utils.sentry import capture_exception | |||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| HASS_WATCHDOG_API = "HASS_WATCHDOG_API" | HASS_WATCHDOG_API = "HASS_WATCHDOG_API" | ||||||
|  | HASS_WATCHDOG_REANIMATE_FAILURES = "HASS_WATCHDOG_REANIMATE_FAILURES" | ||||||
|  | HASS_WATCHDOG_MAX_REANIMATE_ATTEMPTS = 5 | ||||||
|  |  | ||||||
| RUN_UPDATE_SUPERVISOR = 29100 | RUN_UPDATE_SUPERVISOR = 29100 | ||||||
| RUN_UPDATE_ADDONS = 57600 | RUN_UPDATE_ADDONS = 57600 | ||||||
| @@ -102,7 +105,7 @@ class Tasks(CoreSysAttributes): | |||||||
|             # avoid issue on slow IO |             # avoid issue on slow IO | ||||||
|             _LOGGER.info("Add-on auto update process %s", addon.slug) |             _LOGGER.info("Add-on auto update process %s", addon.slug) | ||||||
|             try: |             try: | ||||||
|                 if start_task := await addon.update(backup=True): |                 if start_task := await self.sys_addons.update(addon.slug, backup=True): | ||||||
|                     start_tasks.append(start_task) |                     start_tasks.append(start_task) | ||||||
|             except AddonsError: |             except AddonsError: | ||||||
|                 _LOGGER.error("Can't auto update Add-on %s", addon.slug) |                 _LOGGER.error("Can't auto update Add-on %s", addon.slug) | ||||||
| @@ -142,6 +145,9 @@ class Tasks(CoreSysAttributes): | |||||||
|         if self.sys_homeassistant.error_state: |         if self.sys_homeassistant.error_state: | ||||||
|             # Home Assistant is in an error state, this is handled by the rollback feature |             # Home Assistant is in an error state, this is handled by the rollback feature | ||||||
|             return |             return | ||||||
|  |         if self.sys_homeassistant.version == LANDINGPAGE: | ||||||
|  |             # Skip watchdog for landingpage | ||||||
|  |             return | ||||||
|         if not await self.sys_homeassistant.core.is_running(): |         if not await self.sys_homeassistant.core.is_running(): | ||||||
|             # The home assistant container is not running |             # The home assistant container is not running | ||||||
|             return |             return | ||||||
| @@ -150,6 +156,18 @@ class Tasks(CoreSysAttributes): | |||||||
|             return |             return | ||||||
|         if await self.sys_homeassistant.api.check_api_state(): |         if await self.sys_homeassistant.api.check_api_state(): | ||||||
|             # Home Assistant is running properly |             # Home Assistant is running properly | ||||||
|  |             self._cache[HASS_WATCHDOG_REANIMATE_FAILURES] = 0 | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         # Give up after 5 reanimation failures in a row. Supervisor cannot fix this issue. | ||||||
|  |         reanimate_fails = self._cache.get(HASS_WATCHDOG_REANIMATE_FAILURES, 0) | ||||||
|  |         if reanimate_fails >= HASS_WATCHDOG_MAX_REANIMATE_ATTEMPTS: | ||||||
|  |             if reanimate_fails == HASS_WATCHDOG_MAX_REANIMATE_ATTEMPTS: | ||||||
|  |                 _LOGGER.critical( | ||||||
|  |                     "Watchdog cannot reanimate Home Assistant, failed all %s attempts.", | ||||||
|  |                     reanimate_fails, | ||||||
|  |                 ) | ||||||
|  |                 self._cache[HASS_WATCHDOG_REANIMATE_FAILURES] += 1 | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         # Init cache data |         # Init cache data | ||||||
| @@ -167,7 +185,11 @@ class Tasks(CoreSysAttributes): | |||||||
|             await self.sys_homeassistant.core.restart() |             await self.sys_homeassistant.core.restart() | ||||||
|         except HomeAssistantError as err: |         except HomeAssistantError as err: | ||||||
|             _LOGGER.error("Home Assistant watchdog reanimation failed!") |             _LOGGER.error("Home Assistant watchdog reanimation failed!") | ||||||
|  |             if reanimate_fails == 0: | ||||||
|                 capture_exception(err) |                 capture_exception(err) | ||||||
|  |             self._cache[HASS_WATCHDOG_REANIMATE_FAILURES] = reanimate_fails + 1 | ||||||
|  |         else: | ||||||
|  |             self._cache[HASS_WATCHDOG_REANIMATE_FAILURES] = 0 | ||||||
|         finally: |         finally: | ||||||
|             self._cache[HASS_WATCHDOG_API] = 0 |             self._cache[HASS_WATCHDOG_API] = 0 | ||||||
|  |  | ||||||
|   | |||||||
| @@ -170,6 +170,16 @@ class Mount(CoreSysAttributes, ABC): | |||||||
|         elif self.state != UnitActiveState.ACTIVE: |         elif self.state != UnitActiveState.ACTIVE: | ||||||
|             await self.reload() |             await self.reload() | ||||||
|  |  | ||||||
|  |     async def update_state(self) -> None: | ||||||
|  |         """Update mount unit state.""" | ||||||
|  |         try: | ||||||
|  |             self._state = await self.unit.get_active_state() | ||||||
|  |         except DBusError as err: | ||||||
|  |             capture_exception(err) | ||||||
|  |             raise MountError( | ||||||
|  |                 f"Could not get active state of mount due to: {err!s}" | ||||||
|  |             ) from err | ||||||
|  |  | ||||||
|     async def update(self) -> None: |     async def update(self) -> None: | ||||||
|         """Update info about mount from dbus.""" |         """Update info about mount from dbus.""" | ||||||
|         try: |         try: | ||||||
| @@ -182,13 +192,7 @@ class Mount(CoreSysAttributes, ABC): | |||||||
|             capture_exception(err) |             capture_exception(err) | ||||||
|             raise MountError(f"Could not get mount unit due to: {err!s}") from err |             raise MountError(f"Could not get mount unit due to: {err!s}") from err | ||||||
|  |  | ||||||
|         try: |         await self.update_state() | ||||||
|             self._state = await self.unit.get_active_state() |  | ||||||
|         except DBusError as err: |  | ||||||
|             capture_exception(err) |  | ||||||
|             raise MountError( |  | ||||||
|                 f"Could not get active state of mount due to: {err!s}" |  | ||||||
|             ) from err |  | ||||||
|  |  | ||||||
|         # If active, dismiss corresponding failed mount issue if found |         # If active, dismiss corresponding failed mount issue if found | ||||||
|         if ( |         if ( | ||||||
| @@ -197,6 +201,20 @@ class Mount(CoreSysAttributes, ABC): | |||||||
|         ): |         ): | ||||||
|             self.sys_resolution.dismiss_issue(self.failed_issue) |             self.sys_resolution.dismiss_issue(self.failed_issue) | ||||||
|  |  | ||||||
|  |     async def _update_state_await(self, expected_states: list[UnitActiveState]) -> None: | ||||||
|  |         """Update state info about mount from dbus. Wait up to 30 seconds for the state to appear.""" | ||||||
|  |         for i in range(5): | ||||||
|  |             await self.update_state() | ||||||
|  |             if self.state in expected_states: | ||||||
|  |                 return | ||||||
|  |             await asyncio.sleep(i**2) | ||||||
|  |  | ||||||
|  |         _LOGGER.warning( | ||||||
|  |             "Mount %s still in state %s after waiting for 30 seconods to complete", | ||||||
|  |             self.name, | ||||||
|  |             str(self.state).lower(), | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     async def _update_await_activating(self): |     async def _update_await_activating(self): | ||||||
|         """Update info about mount from dbus. If 'activating' wait up to 30 seconds.""" |         """Update info about mount from dbus. If 'activating' wait up to 30 seconds.""" | ||||||
|         await self.update() |         await self.update() | ||||||
| @@ -269,10 +287,15 @@ class Mount(CoreSysAttributes, ABC): | |||||||
|         await self.update() |         await self.update() | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|  |             if self.state != UnitActiveState.FAILED: | ||||||
|  |                 await self.sys_dbus.systemd.stop_unit(self.unit_name, StopUnitMode.FAIL) | ||||||
|  |  | ||||||
|  |             await self._update_state_await( | ||||||
|  |                 [UnitActiveState.INACTIVE, UnitActiveState.FAILED] | ||||||
|  |             ) | ||||||
|  |  | ||||||
|             if self.state == UnitActiveState.FAILED: |             if self.state == UnitActiveState.FAILED: | ||||||
|                 await self.sys_dbus.systemd.reset_failed_unit(self.unit_name) |                 await self.sys_dbus.systemd.reset_failed_unit(self.unit_name) | ||||||
|             else: |  | ||||||
|                 await self.sys_dbus.systemd.stop_unit(self.unit_name, StopUnitMode.FAIL) |  | ||||||
|         except DBusSystemdNoSuchUnit: |         except DBusSystemdNoSuchUnit: | ||||||
|             _LOGGER.info("Mount %s is not mounted, skipping unmount", self.name) |             _LOGGER.info("Mount %s is not mounted, skipping unmount", self.name) | ||||||
|         except DBusError as err: |         except DBusError as err: | ||||||
|   | |||||||
| @@ -1,9 +1,8 @@ | |||||||
| """Validation for mount manager.""" | """Validation for mount manager.""" | ||||||
|  |  | ||||||
| import re | import re | ||||||
| from typing import TypedDict | from typing import NotRequired, TypedDict | ||||||
|  |  | ||||||
| from typing_extensions import NotRequired |  | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from ..const import ( | from ..const import ( | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| """OS support on supervisor.""" | """OS support on supervisor.""" | ||||||
| import asyncio |  | ||||||
| from collections.abc import Awaitable | from collections.abc import Awaitable | ||||||
|  | import errno | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
|  |  | ||||||
| @@ -14,6 +14,7 @@ from ..dbus.rauc import RaucState | |||||||
| from ..exceptions import DBusError, HassOSJobError, HassOSUpdateError | from ..exceptions import DBusError, HassOSJobError, HassOSUpdateError | ||||||
| from ..jobs.const import JobCondition, JobExecutionLimit | from ..jobs.const import JobCondition, JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
| from .data_disk import DataDisk | from .data_disk import DataDisk | ||||||
|  |  | ||||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
| @@ -114,13 +115,15 @@ class OSManager(CoreSysAttributes): | |||||||
|  |  | ||||||
|             _LOGGER.info("Completed download of OTA update file %s", raucb) |             _LOGGER.info("Completed download of OTA update file %s", raucb) | ||||||
|  |  | ||||||
|         except (aiohttp.ClientError, asyncio.TimeoutError) as err: |         except (aiohttp.ClientError, TimeoutError) as err: | ||||||
|             self.sys_supervisor.connectivity = False |             self.sys_supervisor.connectivity = False | ||||||
|             raise HassOSUpdateError( |             raise HassOSUpdateError( | ||||||
|                 f"Can't fetch OTA update from {url}: {err!s}", _LOGGER.error |                 f"Can't fetch OTA update from {url}: {err!s}", _LOGGER.error | ||||||
|             ) from err |             ) from err | ||||||
|  |  | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             raise HassOSUpdateError( |             raise HassOSUpdateError( | ||||||
|                 f"Can't write OTA file: {err!s}", _LOGGER.error |                 f"Can't write OTA file: {err!s}", _LOGGER.error | ||||||
|             ) from err |             ) from err | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ Code: https://github.com/home-assistant/plugin-audio | |||||||
| """ | """ | ||||||
| import asyncio | import asyncio | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
|  | import errno | ||||||
| import logging | import logging | ||||||
| from pathlib import Path, PurePath | from pathlib import Path, PurePath | ||||||
| import shutil | import shutil | ||||||
| @@ -25,6 +26,7 @@ from ..exceptions import ( | |||||||
| ) | ) | ||||||
| from ..jobs.const import JobExecutionLimit | from ..jobs.const import JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
|  | from ..resolution.const import UnhealthyReason | ||||||
| from ..utils.json import write_json_file | from ..utils.json import write_json_file | ||||||
| from ..utils.sentry import capture_exception | from ..utils.sentry import capture_exception | ||||||
| from .base import PluginBase | from .base import PluginBase | ||||||
| @@ -83,6 +85,9 @@ class PluginAudio(PluginBase): | |||||||
|                 PULSE_CLIENT_TMPL.read_text(encoding="utf-8") |                 PULSE_CLIENT_TMPL.read_text(encoding="utf-8") | ||||||
|             ) |             ) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|  |  | ||||||
|             _LOGGER.error("Can't read pulse-client.tmpl: %s", err) |             _LOGGER.error("Can't read pulse-client.tmpl: %s", err) | ||||||
|  |  | ||||||
|         await super().load() |         await super().load() | ||||||
| @@ -93,6 +98,8 @@ class PluginAudio(PluginBase): | |||||||
|             try: |             try: | ||||||
|                 shutil.copy(ASOUND_TMPL, asound) |                 shutil.copy(ASOUND_TMPL, asound) | ||||||
|             except OSError as err: |             except OSError as err: | ||||||
|  |                 if err.errno == errno.EBADMSG: | ||||||
|  |                     self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|                 _LOGGER.error("Can't create default asound: %s", err) |                 _LOGGER.error("Can't create default asound: %s", err) | ||||||
|  |  | ||||||
|     async def install(self) -> None: |     async def install(self) -> None: | ||||||
|   | |||||||
| @@ -105,11 +105,7 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes): | |||||||
|         if not (event.name == self.instance.name): |         if not (event.name == self.instance.name): | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         if event.state in [ |         if event.state in {ContainerState.FAILED, ContainerState.UNHEALTHY}: | ||||||
|             ContainerState.FAILED, |  | ||||||
|             ContainerState.STOPPED, |  | ||||||
|             ContainerState.UNHEALTHY, |  | ||||||
|         ]: |  | ||||||
|             await self._restart_after_problem(event.state) |             await self._restart_after_problem(event.state) | ||||||
|  |  | ||||||
|     async def _restart_after_problem(self, state: ContainerState): |     async def _restart_after_problem(self, state: ContainerState): | ||||||
| @@ -123,9 +119,6 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes): | |||||||
|                     state, |                     state, | ||||||
|                 ) |                 ) | ||||||
|                 try: |                 try: | ||||||
|                     if state == ContainerState.STOPPED and attempts == 0: |  | ||||||
|                         await self.start() |  | ||||||
|                     else: |  | ||||||
|                     await self.rebuild() |                     await self.rebuild() | ||||||
|                 except PluginError as err: |                 except PluginError as err: | ||||||
|                     attempts = attempts + 1 |                     attempts = attempts + 1 | ||||||
|   | |||||||
| @@ -66,7 +66,7 @@ class PluginCli(PluginBase): | |||||||
|                         image=self.sys_updater.image_cli, |                         image=self.sys_updater.image_cli, | ||||||
|                     ) |                     ) | ||||||
|                     break |                     break | ||||||
|             _LOGGER.warning("Error on install cli plugin. Retry in 30sec") |             _LOGGER.warning("Error on install cli plugin. Retrying in 30sec") | ||||||
|             await asyncio.sleep(30) |             await asyncio.sleep(30) | ||||||
|  |  | ||||||
|         _LOGGER.info("CLI plugin is now installed") |         _LOGGER.info("CLI plugin is now installed") | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ Code: https://github.com/home-assistant/plugin-dns | |||||||
| """ | """ | ||||||
| import asyncio | import asyncio | ||||||
| from contextlib import suppress | from contextlib import suppress | ||||||
|  | import errno | ||||||
| from ipaddress import IPv4Address | from ipaddress import IPv4Address | ||||||
| import logging | import logging | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| @@ -29,7 +30,7 @@ from ..exceptions import ( | |||||||
| ) | ) | ||||||
| from ..jobs.const import JobExecutionLimit | from ..jobs.const import JobExecutionLimit | ||||||
| from ..jobs.decorator import Job | from ..jobs.decorator import Job | ||||||
| from ..resolution.const import ContextType, IssueType, SuggestionType | from ..resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason | ||||||
| from ..utils.json import write_json_file | from ..utils.json import write_json_file | ||||||
| from ..utils.sentry import capture_exception | from ..utils.sentry import capture_exception | ||||||
| from ..validate import dns_url | from ..validate import dns_url | ||||||
| @@ -146,12 +147,16 @@ class PluginDns(PluginBase): | |||||||
|                 RESOLV_TMPL.read_text(encoding="utf-8") |                 RESOLV_TMPL.read_text(encoding="utf-8") | ||||||
|             ) |             ) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error("Can't read resolve.tmpl: %s", err) |             _LOGGER.error("Can't read resolve.tmpl: %s", err) | ||||||
|         try: |         try: | ||||||
|             self.hosts_template = jinja2.Template( |             self.hosts_template = jinja2.Template( | ||||||
|                 HOSTS_TMPL.read_text(encoding="utf-8") |                 HOSTS_TMPL.read_text(encoding="utf-8") | ||||||
|             ) |             ) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.error("Can't read hosts.tmpl: %s", err) |             _LOGGER.error("Can't read hosts.tmpl: %s", err) | ||||||
|  |  | ||||||
|         await self._init_hosts() |         await self._init_hosts() | ||||||
| @@ -175,7 +180,7 @@ class PluginDns(PluginBase): | |||||||
|                         self.latest_version, image=self.sys_updater.image_dns |                         self.latest_version, image=self.sys_updater.image_dns | ||||||
|                     ) |                     ) | ||||||
|                     break |                     break | ||||||
|             _LOGGER.warning("Error on install CoreDNS plugin. Retry in 30sec") |             _LOGGER.warning("Error on install CoreDNS plugin. Retrying in 30sec") | ||||||
|             await asyncio.sleep(30) |             await asyncio.sleep(30) | ||||||
|  |  | ||||||
|         _LOGGER.info("CoreDNS plugin now installed") |         _LOGGER.info("CoreDNS plugin now installed") | ||||||
| @@ -364,6 +369,8 @@ class PluginDns(PluginBase): | |||||||
|                 self.hosts.write_text, data, encoding="utf-8" |                 self.hosts.write_text, data, encoding="utf-8" | ||||||
|             ) |             ) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             raise CoreDNSError(f"Can't update hosts: {err}", _LOGGER.error) from err |             raise CoreDNSError(f"Can't update hosts: {err}", _LOGGER.error) from err | ||||||
|  |  | ||||||
|     async def add_host( |     async def add_host( | ||||||
| @@ -436,6 +443,12 @@ class PluginDns(PluginBase): | |||||||
|  |  | ||||||
|     def _write_resolv(self, resolv_conf: Path) -> None: |     def _write_resolv(self, resolv_conf: Path) -> None: | ||||||
|         """Update/Write resolv.conf file.""" |         """Update/Write resolv.conf file.""" | ||||||
|  |         if not self.resolv_template: | ||||||
|  |             _LOGGER.warning( | ||||||
|  |                 "Resolv template is missing, cannot write/update %s", resolv_conf | ||||||
|  |             ) | ||||||
|  |             return | ||||||
|  |  | ||||||
|         nameservers = [str(self.sys_docker.network.dns), "127.0.0.11"] |         nameservers = [str(self.sys_docker.network.dns), "127.0.0.11"] | ||||||
|  |  | ||||||
|         # Read resolv config |         # Read resolv config | ||||||
| @@ -445,6 +458,8 @@ class PluginDns(PluginBase): | |||||||
|         try: |         try: | ||||||
|             resolv_conf.write_text(data) |             resolv_conf.write_text(data) | ||||||
|         except OSError as err: |         except OSError as err: | ||||||
|  |             if err.errno == errno.EBADMSG: | ||||||
|  |                 self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE | ||||||
|             _LOGGER.warning("Can't write/update %s: %s", resolv_conf, err) |             _LOGGER.warning("Can't write/update %s: %s", resolv_conf, err) | ||||||
|             return |             return | ||||||
|  |  | ||||||
|   | |||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user