Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-18 13:39:21 +00:00)

Compare commits: trigger-sy… → 2025.02.4 (204 commits)
Commit SHAs, newest first (the author, date, and message columns were not captured by the mirror):

42e78408a7 15e8940c7f 644ec45ded a8d2743f56 0acef4a6e6 5733db94aa
da8c6cf111 802ee25a8b ce8b107f1e 32936e5de0 c35746c3e1 392dd9f904
d8f792950b 1f6cdc3018 616f1903b7 997a51fc42 cda6325be4 c8cc6fe003
34939cfe52 37bc703bbb 5f8e41b441 606db3585c 4054749eb2 ad5827d33f
249464e928 3bc55c054a 4c108eea64 9b2dbd634d 2cb2a48184 ed5a0b511e
1475dcb50b 5cd7f6fd84 52cc17fa3f fa6949f4e4 63a4cee770 7aed0c1b0d
de592a6ef4 ff7086c0d0 ef0352ecd6 7348745049 2078044062 d254937590
9a8e52d1fc 6e7fac5493 129a37a1f4 01382e774e 9164d35615 58df65541c
4c04f364a3 7f39538231 be98e0c0f4 9491b1ff89 30cbb039d0 1aabca9489
28a87db515 05b648629f d1d8446480 8b897ba537 c8f1b222c0 257e2ceb82
67a27cae40 8ff9c08e82 1b0aa30881 2a8d2d2b48 44bd787276 690f1c07a7
8e185a8413 1f7df73964 a10afc45b1 61a2101d8a 088832c253 a545b680b3
805017eabf b7412b0679 fff3bfd01e 5f165a79ba 0d3acd1aca 463f196472
52d5df6778 ce75c85e65 12fd61142d 0073227785 89a215cc1f b2aece8208
600bf91c4f da6bdfa795 5d4894a1ba d4c047bd01 6183b9719c f02d67ee47
bd156ebb53 b07236b544 5928a31fc4 3a71ea7003 96900b1f1b 65b39661a6
18251ae8ae c418e0ea76 74b009ccd7 d2631bf398 c62358d851 e3af04701a
c2f6e319f2 61b37877be e72c5a037b 578383411c dbd37d6575 c7cf1e7593
c06fb069ab b6c2259bd7 d0b7cc8ab3 0f77021bcc b44e6d8cd3 dfe9e94f87
53ccc5249a 5993818c16 a631dea01a c5b85b2831 3c1920e4e1 ca6ae7f4ce
031ad0dbe6 d8101ddba8 de68868788 90590ae2de 5e6bef7189 7ab5555087
02ceb713ea 774aef74e8 045454b597 829193fe84 1f893117cc 9008009727
3bf3bffabf d44e995aed 5a22599b93 ae60e947f3 8115fd98bc 3201061ada
b68caecbce 5e780293c7 6e32144e9a 9b52fee0a3 7af4b17430 4195c0fb33
8fe1cfbb20 623c532c9e 3a904383af 28299affef 11ca772ada 42e704d563
ec7241c0fd d11d59dd92 7a55f58a5f 0b5b5f7fd4 56f3d384d6 29117bb90b
5519f6a53b a45d507bee 0a663b5c27 0f1fed525c 209cddc843 4e0de93096
3b6c5d5d33 0843971e95 12d7496cd1 ed34348c80 fefb83558a 93a0ae4030
5394cff296 ca3e6da943 756a5f8836 a8e7bb670e 687d7652a0 9f414ee9da
67c2f8eb83 c033d5ce8d fd056f3840 e3488b8a08 e1e5d3a8f2 473662e56d
b29bc23487 54817ef562 dd8abf738e 55e58d39d9 ac5ce4cc9e 2525467a2e
81066aab83 93f4b24e72 9a07ff7fc4 1a278f2590 93472ed6dd dcaf2653b8
0714d7845a 8f2269d871 c7487e004d 09d3edf526 9c99bf368f 6f196c9dea
fcac17f335 f5a026cdd8 c6488c1ee3 f47d0d2867 96df335b36 cc9a931baa
.devcontainer/devcontainer.json (filename inferred from content)

@@ -1,6 +1,6 @@
 {
   "name": "Supervisor dev",
-  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
+  "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
@@ -44,5 +44,8 @@
       }
     }
   },
-  "mounts": ["type=volume,target=/var/lib/docker"]
+  "mounts": [
+    "type=volume,target=/var/lib/docker",
+    "type=volume,target=/mnt/supervisor"
+  ]
 }
.github/workflows/builder.yml (vendored, 18 changed lines)

@@ -33,7 +33,7 @@ on:
       - setup.py

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor

@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
         with:
           fetch-depth: 0

@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
         with:
           fetch-depth: 0

@@ -106,9 +106,9 @@ jobs:

       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2024.07.1
+        uses: home-assistant/wheels@2024.11.0
         with:
-          abi: cp312
+          abi: cp313
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}

@@ -125,13 +125,13 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.7.0
+        uses: sigstore/cosign-installer@v3.8.1
         with:
           cosign-release: "v2.4.0"

@@ -178,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,7 +203,7 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2

       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
.github/workflows/ci.yaml (vendored, 72 changed lines)

@@ -8,7 +8,7 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit

 concurrency:
@@ -25,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -47,7 +47,7 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -67,15 +67,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -87,7 +87,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -110,15 +110,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -130,7 +130,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -153,7 +153,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +168,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -188,7 +188,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -212,15 +212,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -232,7 +232,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -256,15 +256,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -274,6 +274,10 @@ jobs:
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
+      - name: Install additional system dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y --no-install-recommends libpulse0
       - name: Register pylint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/pylint.json"
@@ -288,19 +292,19 @@ jobs:
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
          python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.7.0
+        uses: sigstore/cosign-installer@v3.8.1
         with:
           cosign-release: "v2.4.0"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -335,7 +339,7 @@ jobs:
           -o console_output_style=count \
           tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
@@ -347,15 +351,15 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.2.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.1
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |
@@ -374,4 +378,4 @@ jobs:
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v4.6.0
+        uses: codecov/codecov-action@v5.3.1
.github/workflows/release-drafter.yml (vendored, 4 changed lines)

@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
         with:
           fetch-depth: 0

@@ -36,7 +36,7 @@ jobs:
           echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"

       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v6.0.0
+        uses: release-drafter/release-drafter@v6.1.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}
.github/workflows/sentry.yaml (vendored, 4 changed lines)

@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.1
+        uses: actions/checkout@v4.2.2
       - name: Sentry Release
-        uses: getsentry/action-release@v1.7.0
+        uses: getsentry/action-release@v1.10.4
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
.github/workflows/stale.yml (vendored, 2 changed lines)

@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9.0.0
+      - uses: actions/stale@v9.1.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
.github/workflows/update_frontend.yml (vendored, new file, 74 lines)

@@ -0,0 +1,74 @@
+name: Update frontend
+
+on:
+  schedule: # once a day
+    - cron: "0 0 * * *"
+  workflow_dispatch:
+
+jobs:
+  check-version:
+    runs-on: ubuntu-latest
+    outputs:
+      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
+      latest_tag: ${{ steps.latest_frontend_version.outputs.latest_tag }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Get latest frontend release
+        id: latest_frontend_version
+        uses: abatilo/release-info-action@v1.3.3
+        with:
+          owner: home-assistant
+          repo: frontend
+      - name: Check if version is up to date
+        id: check_version
+        run: |
+          SUPERVISOR_VERSION=$(cat .ha-frontend-version)
+          LATEST_VERSION=${{ steps.latest_frontend_version.outputs.latest_tag }}
+          echo "SUPERVISOR_VERSION=$SUPERVISOR_VERSION" >> $GITHUB_ENV
+          echo "LATEST_VERSION=$LATEST_VERSION" >> $GITHUB_ENV
+          if [[ ! "$SUPERVISOR_VERSION" < "$LATEST_VERSION" ]]; then
+            echo "Frontend version is up to date"
+            echo "skip=true" >> $GITHUB_OUTPUT
+          fi
+      - name: Check if there is no open PR with this version
+        if: steps.check_version.outputs.skip != 'true'
+        id: check_existing_pr
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          PR=$(gh pr list --state open --base main --json title --search "Autoupdate frontend to version $LATEST_VERSION")
+          if [[ "$PR" != "[]" ]]; then
+            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
+            echo "skip=true" >> $GITHUB_OUTPUT
+          fi
+  create-pr:
+    runs-on: ubuntu-latest
+    needs: check-version
+    if: needs.check-version.outputs.skip != 'true'
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Clear www folder
+        run: |
+          rm -rf supervisor/api/panel/*
+      - name: Update version file
+        run: |
+          echo "${{ needs.check-version.outputs.latest_tag }}" > .ha-frontend-version
+      - name: Download release assets
+        uses: robinraju/release-downloader@v1
+        with:
+          repository: 'home-assistant/frontend'
+          tag: ${{ needs.check-version.outputs.latest_tag }}
+          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_tag }}.tar.gz
+          extract: true
+          out-file-path: supervisor/api/panel/
+      - name: Create PR
+        uses: peter-evans/create-pull-request@v7
+        with:
+          commit-message: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
+          branch: autoupdate-frontend
+          base: main
+          draft: true
+          sign-commits: true
+          title: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
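A note on the version check in this workflow: bash's `[[ "$a" < "$b" ]]` compares strings lexicographically, which works here because frontend tags carry a fixed-width YYYYMMDD prefix (a two-digit patch suffix such as `.10` could in principle misorder against `.2`, but the date prefix dominates in practice). A minimal sketch of the same comparison in Python, with illustrative values that are not from this changeset:

# Lexicographic string comparison mirrors the bash `<` test.
current = "20250221.0"  # contents of .ha-frontend-version
latest = "20250301.0"   # hypothetical newer frontend release tag

needs_update = current < latest
print(needs_update)  # True -> the workflow would open an update PR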
.gitmodules (vendored, 4 lines removed)

@@ -1,4 +0,0 @@
-[submodule "home-assistant-polymer"]
-	path = home-assistant-polymer
-	url = https://github.com/home-assistant/home-assistant-polymer
-	branch = dev
.ha-frontend-version (new file, 1 line)

@@ -0,0 +1 @@
+20250221.0
.pre-commit-config.yaml (filename inferred from content)

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.7
+    rev: v0.9.1
     hooks:
       - id: ruff
         args:
@@ -8,7 +8,7 @@ repos:
       - id: ruff-format
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v5.0.0
     hooks:
       - id: check-executables-have-shebangs
        stages: [manual]
Dockerfile (12 changed lines)

@@ -9,7 +9,8 @@ ENV \

 ARG \
     COSIGN_VERSION \
-    BUILD_ARCH
+    BUILD_ARCH \
+    QEMU_CPU

 # Install base
 WORKDIR /usr/src
@@ -28,22 +29,23 @@ RUN \
     \
     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
     && chmod a+x /usr/bin/cosign \
-    && pip3 install uv==0.2.21
+    && pip3 install uv==0.6.1

 # Install requirements
 COPY requirements.txt .
 RUN \
     if [ "${BUILD_ARCH}" = "i386" ]; then \
-        linux32 uv pip install --no-build -r requirements.txt; \
+        setarch="linux32"; \
     else \
-        uv pip install --no-build -r requirements.txt; \
+        setarch=""; \
     fi \
+    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
    && rm -f requirements.txt

 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    pip3 install -e ./supervisor \
+    uv pip install --no-cache -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor
build.yaml (10 changed lines)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
-  i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
+  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
Submodule home-assistant-polymer deleted from 9d457d52e8
pyproject.toml (filename inferred from content)

@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
+requires = ["setuptools~=75.8.0", "wheel~=0.45.0"]
 build-backend = "setuptools.build_meta"

 [project]
@@ -12,7 +12,7 @@ authors = [
     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
 ]
 keywords = ["docker", "home-assistant", "api"]
-requires-python = ">=3.12.0"
+requires-python = ">=3.13.0"

 [project.urls]
 "Homepage" = "https://www.home-assistant.io/"
@@ -31,7 +31,7 @@ include-package-data = true
 include = ["supervisor*"]

 [tool.pylint.MAIN]
-py-version = "3.12"
+py-version = "3.13"
 # Use a conservative default here; 2 should speed up most setups and not hurt
 # any too bad. Override on command line as appropriate.
 jobs = 2
@@ -147,7 +147,7 @@ disable = [
     # "pointless-statement", # B018, ruff catches new occurrences, needs more work
     "raise-missing-from", # TRY200
     # "redefined-builtin", # A001, ruff is way more stricter, needs work
-    "try-except-raise", # TRY302
+    "try-except-raise", # TRY203
     "unused-argument", # ARG001, we don't use it
     "unused-format-string-argument", #F507
     "unused-format-string-key", # F504
@@ -223,6 +223,7 @@ testpaths = ["tests"]
 norecursedirs = [".git"]
 log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
 log_date_format = "%Y-%m-%d %H:%M:%S"
+asyncio_default_fixture_loop_scope = "function"
 asyncio_mode = "auto"
 filterwarnings = [
     "error",
@@ -289,7 +290,7 @@ lint.select = [
     "T20", # flake8-print
     "TID251", # Banned imports
     "TRY004", # Prefer TypeError exception for invalid type
-    "TRY302", # Remove exception handler; error is immediately re-raised
+    "TRY203", # Remove exception handler; error is immediately re-raised
     "UP", # pyupgrade
     "W", # pycodestyle
 ]
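The rule rename above (TRY302 to TRY203) tracks ruff's renumbering of the same "useless try/except" check. A minimal sketch of the pattern it flags:

# The handler below only re-raises, so it adds nothing; ruff's TRY203
# (formerly TRY302) suggests deleting the try/except entirely.
def read_file(path: str) -> str:
    try:
        with open(path, encoding="utf-8") as handle:
            return handle.read()
    except OSError:
        raise  # flagged: exception is immediately re-raised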
requirements.txt (filename inferred from content)

@@ -1,29 +1,29 @@
 aiodns==3.2.0
-aiohttp==3.10.10
+aiohttp==3.11.13
 atomicwrites-homeassistant==1.4.1
-attrs==24.2.0
+attrs==25.1.0
 awesomeversion==24.6.0
 brotli==1.1.0
-ciso8601==2.3.1
-colorlog==6.8.2
+ciso8601==2.3.2
+colorlog==6.9.0
 cpe==1.3.1
-cryptography==43.0.1
-debugpy==1.8.7
+cryptography==44.0.1
+debugpy==1.8.12
 deepmerge==2.0
 dirhash==0.5.0
 docker==7.1.0
 faust-cchardet==2.1.19
-gitpython==3.1.43
-jinja2==3.1.4
-orjson==3.10.7
-pulsectl==24.8.0
+gitpython==3.1.44
+jinja2==3.1.5
+orjson==3.10.12
+pulsectl==24.12.0
 pyudev==0.24.3
 PyYAML==6.0.2
 requests==2.32.3
-securetar==2024.2.1
-sentry-sdk==2.16.0
-setuptools==75.1.0
+securetar==2025.2.0
+sentry-sdk==2.22.0
+setuptools==75.8.0
 voluptuous==0.15.2
-dbus-fast==2.24.3
+dbus-fast==2.34.0
 typing_extensions==4.12.2
-zlib-fast==0.2.0
+zlib-fast==0.2.1
requirements_tests.txt (filename inferred from content)

@@ -1,12 +1,13 @@
-coverage==7.6.3
-pre-commit==4.0.1
-pylint==3.3.1
-pytest-aiohttp==1.0.5
-pytest-asyncio==0.23.6
-pytest-cov==5.0.0
+astroid==3.3.8
+coverage==7.6.12
+pre-commit==4.1.0
+pylint==3.3.4
+pytest-aiohttp==1.1.0
+pytest-asyncio==0.25.2
+pytest-cov==6.0.0
 pytest-timeout==2.3.1
-pytest==8.3.3
-ruff==0.6.9
+pytest==8.3.4
+ruff==0.9.7
 time-machine==2.16.0
 typing_extensions==4.12.2
-urllib3==2.2.3
+urllib3==2.3.0
(deleted file: a Bash helper that built the frontend from the home-assistant-polymer submodule; the original path was not captured by the mirror)

@@ -1,30 +0,0 @@
-#!/bin/bash
-source "/etc/supervisor_scripts/common"
-
-set -e
-
-# Update frontend
-git submodule update --init --recursive --remote
-
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
-cd home-assistant-polymer
-nvm install
-script/bootstrap
-
-# Download translations
-start_docker
-./script/translations_download
-
-# build frontend
-cd hassio
-./script/build_hassio
-
-# Copy frontend
-rm -rf ../../supervisor/api/panel/*
-cp -rf build/* ../../supervisor/api/panel/
-
-# Reset frontend git
-cd ..
-git reset --hard HEAD
-
-stop_docker
setup.py (2 changed lines)

@@ -19,7 +19,7 @@ def _get_supervisor_version():
     for line in CONSTANTS.split("/n"):
         if match := RE_SUPERVISOR_VERSION.match(line):
             return match.group(1)
-    return "99.9.9dev"
+    return "9999.09.9.dev9999"


 setup(
supervisor/addons/addon.py (filename inferred from context)

@@ -6,6 +6,7 @@ from contextlib import suppress
 from copy import deepcopy
 from datetime import datetime
 import errno
+from functools import partial
 from ipaddress import IPv4Address
 import logging
 from pathlib import Path, PurePath
@@ -81,7 +82,8 @@ from ..hardware.data import Device
 from ..homeassistant.const import WSEvent, WSType
 from ..jobs.const import JobExecutionLimit
 from ..jobs.decorator import Job
-from ..resolution.const import UnhealthyReason
+from ..resolution.const import ContextType, IssueType, UnhealthyReason
+from ..resolution.data import Issue
 from ..store.addon import AddonStore
 from ..utils import check_port
 from ..utils.apparmor import adjust_profile
@@ -144,11 +146,27 @@ class Addon(AddonModel):
         self._listeners: list[EventListener] = []
         self._startup_event = asyncio.Event()
         self._startup_task: asyncio.Task | None = None
+        self._boot_failed_issue = Issue(
+            IssueType.BOOT_FAIL, ContextType.ADDON, reference=self.slug
+        )
+        self._device_access_missing_issue = Issue(
+            IssueType.DEVICE_ACCESS_MISSING, ContextType.ADDON, reference=self.slug
+        )

     def __repr__(self) -> str:
         """Return internal representation."""
         return f"<Addon: {self.slug}>"

+    @property
+    def boot_failed_issue(self) -> Issue:
+        """Get issue used if start on boot failed."""
+        return self._boot_failed_issue
+
+    @property
+    def device_access_missing_issue(self) -> Issue:
+        """Get issue used if device access is missing and can't be automatically added."""
+        return self._device_access_missing_issue
+
     @property
     def state(self) -> AddonState:
         """Return state of the add-on."""
@@ -166,6 +184,20 @@ class Addon(AddonModel):
         if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
             self._startup_event.set()

+        # Dismiss boot failed issue if present and we started
+        if (
+            new_state == AddonState.STARTED
+            and self.boot_failed_issue in self.sys_resolution.issues
+        ):
+            self.sys_resolution.dismiss_issue(self.boot_failed_issue)
+
+        # Dismiss device access missing issue if present and we stopped
+        if (
+            new_state == AddonState.STOPPED
+            and self.device_access_missing_issue in self.sys_resolution.issues
+        ):
+            self.sys_resolution.dismiss_issue(self.device_access_missing_issue)
+
         self.sys_homeassistant.websocket.send_message(
             {
                 ATTR_TYPE: WSType.SUPERVISOR_EVENT,
@@ -322,6 +354,13 @@ class Addon(AddonModel):
         """Store user boot options."""
         self.persist[ATTR_BOOT] = value

+        # Dismiss boot failed issue if present and boot at start disabled
+        if (
+            value == AddonBoot.MANUAL
+            and self._boot_failed_issue in self.sys_resolution.issues
+        ):
+            self.sys_resolution.dismiss_issue(self._boot_failed_issue)
+
     @property
     def auto_update(self) -> bool:
         """Return if auto update is enable."""
@@ -1169,6 +1208,25 @@ class Addon(AddonModel):
             await self._backup_command(self.backup_post)
         return None

+    def _is_excluded_by_filter(
+        self, origin_path: Path, arcname: str, item_arcpath: PurePath
+    ) -> bool:
+        """Filter out files from backup based on filters provided by addon developer.
+
+        This tests the dev provided filters against the full path of the file as
+        Supervisor sees them using match. This is done for legacy reasons, testing
+        against the relative path makes more sense and may be changed in the future.
+        """
+        full_path = origin_path / item_arcpath.relative_to(arcname)
+
+        for exclude in self.backup_exclude:
+            if not full_path.match(exclude):
+                continue
+            _LOGGER.debug("Ignoring %s because of %s", full_path, exclude)
+            return True
+
+        return False
+
     @Job(
         name="addon_backup",
         limit=JobExecutionLimit.GROUP_ONCE,
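A short illustration (not part of the diff) of the PurePath.match() behavior the new _is_excluded_by_filter helper depends on: glob patterns are matched from the right against the reconstructed full path, which is the "legacy" full-path matching the docstring calls out.

from pathlib import PurePath

# Hypothetical add-on data file and developer-provided exclude patterns.
full_path = PurePath("/data/addons/data/local_example/cache/tmp.db")

print(full_path.match("cache/*"))   # True: matches the trailing components
print(full_path.match("*.db"))      # True: matches the final component
print(full_path.match("backup/*"))  # False: no trailing segment matches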
@@ -1180,46 +1238,45 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         for cold backup. Else nothing is returned.
         """
-        wait_for_start: Awaitable[None] | None = None
-
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-            temp_path = Path(temp)
-
-            # store local image
-            if self.need_build:
-                try:
-                    await self.instance.export_image(temp_path.joinpath("image.tar"))
-                except DockerError as err:
-                    raise AddonsError() from err
-
-            data = {
-                ATTR_USER: self.persist,
-                ATTR_SYSTEM: self.data,
-                ATTR_VERSION: self.version,
-                ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
-            }
-
-            # Store local configs/state
-            try:
-                write_json_file(temp_path.joinpath("addon.json"), data)
-            except ConfigurationFileError as err:
-                raise AddonsError(
-                    f"Can't save meta for {self.slug}", _LOGGER.error
-                ) from err
-
-            # Store AppArmor Profile
-            if self.sys_host.apparmor.exists(self.slug):
-                profile = temp_path.joinpath("apparmor.txt")
-                try:
-                    await self.sys_host.apparmor.backup_profile(self.slug, profile)
-                except HostAppArmorError as err:
-                    raise AddonsError(
-                        "Can't backup AppArmor profile", _LOGGER.error
-                    ) from err
-
-            # write into tarfile
-            def _write_tarfile():
-                """Write tar inside loop."""
+        def _addon_backup(
+            store_image: bool,
+            metadata: dict[str, Any],
+            apparmor_profile: str | None,
+            addon_config_used: bool,
+        ):
+            """Start the backup process."""
+            with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
+                temp_path = Path(temp)
+
+                # store local image
+                if store_image:
+                    try:
+                        self.instance.export_image(temp_path.joinpath("image.tar"))
+                    except DockerError as err:
+                        raise AddonsError() from err
+
+                # Store local configs/state
+                try:
+                    write_json_file(temp_path.joinpath("addon.json"), metadata)
+                except ConfigurationFileError as err:
+                    raise AddonsError(
+                        f"Can't save meta for {self.slug}", _LOGGER.error
+                    ) from err
+
+                # Store AppArmor Profile
+                if apparmor_profile:
+                    profile_backup_file = temp_path.joinpath("apparmor.txt")
+                    try:
+                        self.sys_host.apparmor.backup_profile(
+                            apparmor_profile, profile_backup_file
+                        )
+                    except HostAppArmorError as err:
+                        raise AddonsError(
+                            "Can't backup AppArmor profile", _LOGGER.error
+                        ) from err
+
+                # Write tarfile
                 with tar_file as backup:
                     # Backup metadata
                     backup.add(temp, arcname=".")
@@ -1228,32 +1285,56 @@ class Addon(AddonModel):
                 atomic_contents_add(
                     backup,
                     self.path_data,
-                    excludes=self.backup_exclude,
+                    file_filter=partial(
+                        self._is_excluded_by_filter, self.path_data, "data"
+                    ),
                     arcname="data",
                 )

                 # Backup config
-                if self.addon_config_used:
+                if addon_config_used:
                     atomic_contents_add(
                         backup,
                         self.path_config,
-                        excludes=self.backup_exclude,
+                        file_filter=partial(
+                            self._is_excluded_by_filter, self.path_config, "config"
+                        ),
                         arcname="config",
                     )

-        is_running = await self.begin_backup()
-        try:
-            _LOGGER.info("Building backup for add-on %s", self.slug)
-            await self.sys_run_in_executor(_write_tarfile)
-        except (tarfile.TarError, OSError) as err:
-            raise AddonsError(
-                f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
-            ) from err
-        finally:
-            if is_running:
-                wait_for_start = await self.end_backup()
-
-        _LOGGER.info("Finish backup for addon %s", self.slug)
+        wait_for_start: Awaitable[None] | None = None
+
+        data = {
+            ATTR_USER: self.persist,
+            ATTR_SYSTEM: self.data,
+            ATTR_VERSION: self.version,
+            ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
+        }
+        apparmor_profile = (
+            self.slug if self.sys_host.apparmor.exists(self.slug) else None
+        )
+
+        was_running = await self.begin_backup()
+        try:
+            _LOGGER.info("Building backup for add-on %s", self.slug)
+            await self.sys_run_in_executor(
+                partial(
+                    _addon_backup,
+                    store_image=self.need_build,
+                    metadata=data,
+                    apparmor_profile=apparmor_profile,
+                    addon_config_used=self.addon_config_used,
+                )
+            )
+            _LOGGER.info("Finish backup for addon %s", self.slug)
+        except (tarfile.TarError, OSError) as err:
+            raise AddonsError(
+                f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
+            ) from err
+        finally:
+            if was_running:
+                wait_for_start = await self.end_backup()
+
         return wait_for_start

 @Job(
@@ -1268,30 +1349,36 @@ class Addon(AddonModel):
         if addon is started after restore. Else nothing is returned.
         """
         wait_for_start: Awaitable[None] | None = None
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-            # extract backup
-            def _extract_tarfile():
-                """Extract tar backup."""
+
+        # Extract backup
+        def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
+            """Extract tar backup."""
+            tmp = TemporaryDirectory(dir=self.sys_config.path_tmp)
+            try:
                 with tar_file as backup:
                     backup.extractall(
-                        path=Path(temp),
+                        path=tmp.name,
                         members=secure_path(backup),
                         filter="fully_trusted",
                     )

-            try:
-                await self.sys_run_in_executor(_extract_tarfile)
-            except tarfile.TarError as err:
-                raise AddonsError(
-                    f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
-                ) from err
-
-            # Read backup data
-            try:
-                data = read_json_file(Path(temp, "addon.json"))
-            except ConfigurationFileError as err:
-                raise AddonsError() from err
-
+                data = read_json_file(Path(tmp.name, "addon.json"))
+            except:
+                tmp.cleanup()
+                raise
+
+            return tmp, data
+
+        try:
+            tmp, data = await self.sys_run_in_executor(_extract_tarfile)
+        except tarfile.TarError as err:
+            raise AddonsError(
+                f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
+            ) from err
+        except ConfigurationFileError as err:
+            raise AddonsError() from err
+
+        try:
             # Validate
             try:
                 data = SCHEMA_ADDON_BACKUP(data)
@@ -1325,7 +1412,7 @@ class Addon(AddonModel):
             if not await self.instance.exists():
                 _LOGGER.info("Restore/Install of image for addon %s", self.slug)

-                image_file = Path(temp, "image.tar")
+                image_file = Path(tmp.name, "image.tar")
                 if image_file.is_file():
                     with suppress(DockerError):
                         await self.instance.import_image(image_file)
@@ -1344,13 +1431,13 @@ class Addon(AddonModel):
             # Restore data and config
             def _restore_data():
                 """Restore data and config."""
-                temp_data = Path(temp, "data")
+                temp_data = Path(tmp.name, "data")
                 if temp_data.is_dir():
                     shutil.copytree(temp_data, self.path_data, symlinks=True)
                 else:
                     self.path_data.mkdir()

-                temp_config = Path(temp, "config")
+                temp_config = Path(tmp.name, "config")
                 if temp_config.is_dir():
                     shutil.copytree(temp_config, self.path_config, symlinks=True)
                 elif self.addon_config_used:
@@ -1370,7 +1457,7 @@ class Addon(AddonModel):
                 ) from err

             # Restore AppArmor
-            profile_file = Path(temp, "apparmor.txt")
+            profile_file = Path(tmp.name, "apparmor.txt")
             if profile_file.exists():
                 try:
                     await self.sys_host.apparmor.load_profile(
@@ -1378,7 +1465,8 @@ class Addon(AddonModel):
                     )
                 except HostAppArmorError as err:
                     _LOGGER.error(
-                        "Can't restore AppArmor profile for add-on %s", self.slug
+                        "Can't restore AppArmor profile for add-on %s",
+                        self.slug,
                     )
                     raise AddonsError() from err

@@ -1390,7 +1478,8 @@ class Addon(AddonModel):
             # Run add-on
             if data[ATTR_STATE] == AddonState.STARTED:
                 wait_for_start = await self.start()
+        finally:
+            tmp.cleanup()
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
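The backup refactor above bundles all blocking tarfile and filesystem work into one plain function and runs it off the event loop. A self-contained sketch of the same pattern (the names here are illustrative, not Supervisor APIs):

import asyncio
import tarfile
from functools import partial


def build_archive(src: str, dest: str) -> None:
    """Blocking work: runs in a worker thread, never on the event loop."""
    with tarfile.open(dest, "w:gz") as tar:
        tar.add(src, arcname=".")


async def main() -> None:
    loop = asyncio.get_running_loop()
    # Comparable in spirit to `await self.sys_run_in_executor(partial(_addon_backup, ...))`:
    # partial() carries the arguments so the executor only sees a zero-argument callable.
    await loop.run_in_executor(None, partial(build_archive, ".", "/tmp/backup.tar.gz"))


asyncio.run(main())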
@@ -7,24 +7,22 @@ import logging
|
|||||||
import tarfile
|
import tarfile
|
||||||
from typing import Union
|
from typing import Union
|
||||||
|
|
||||||
|
from attr import evolve
|
||||||
|
|
||||||
from ..const import AddonBoot, AddonStartup, AddonState
|
from ..const import AddonBoot, AddonStartup, AddonState
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
from ..coresys import CoreSys, CoreSysAttributes
|
||||||
from ..exceptions import (
|
from ..exceptions import (
|
||||||
AddonConfigurationError,
|
|
||||||
AddonsError,
|
AddonsError,
|
||||||
AddonsJobError,
|
AddonsJobError,
|
||||||
AddonsNotSupportedError,
|
AddonsNotSupportedError,
|
||||||
CoreDNSError,
|
CoreDNSError,
|
||||||
DockerAPIError,
|
|
||||||
DockerError,
|
DockerError,
|
||||||
DockerNotFound,
|
|
||||||
HassioError,
|
HassioError,
|
||||||
HomeAssistantAPIError,
|
HomeAssistantAPIError,
|
||||||
)
|
)
|
||||||
from ..jobs.decorator import Job, JobCondition
|
from ..jobs.decorator import Job, JobCondition
|
||||||
from ..resolution.const import ContextType, IssueType, SuggestionType
|
from ..resolution.const import ContextType, IssueType, SuggestionType
|
||||||
from ..store.addon import AddonStore
|
from ..store.addon import AddonStore
|
||||||
from ..utils import check_exception_chain
|
|
||||||
from ..utils.sentry import capture_exception
|
from ..utils.sentry import capture_exception
|
||||||
from .addon import Addon
|
from .addon import Addon
|
||||||
from .const import ADDON_UPDATE_CONDITIONS
|
from .const import ADDON_UPDATE_CONDITIONS
|
||||||
@@ -118,15 +116,14 @@ class AddonManager(CoreSysAttributes):
|
|||||||
try:
|
try:
|
||||||
if start_task := await addon.start():
|
if start_task := await addon.start():
|
||||||
wait_boot.append(start_task)
|
wait_boot.append(start_task)
|
||||||
except AddonsError as err:
|
|
||||||
# Check if there is an system/user issue
|
|
||||||
if check_exception_chain(
|
|
||||||
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
|
|
||||||
):
|
|
||||||
addon.boot = AddonBoot.MANUAL
|
|
||||||
addon.save_persist()
|
|
||||||
except HassioError:
|
except HassioError:
|
||||||
pass # These are already handled
|
self.sys_resolution.add_issue(
|
||||||
|
evolve(addon.boot_failed_issue),
|
||||||
|
suggestions=[
|
||||||
|
SuggestionType.EXECUTE_START,
|
||||||
|
SuggestionType.DISABLE_BOOT,
|
||||||
|
],
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -135,6 +132,19 @@ class AddonManager(CoreSysAttributes):
         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
         await asyncio.gather(*wait_boot, return_exceptions=True)
 
+        # After waiting for startup, create an issue for boot addons that are error or unknown state
+        # Ignore stopped as single shot addons can be run at boot and this is successful exit
+        # Timeout waiting for startup is not a failure, addon is probably just slow
+        for addon in tasks:
+            if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
+                self.sys_resolution.add_issue(
+                    evolve(addon.boot_failed_issue),
+                    suggestions=[
+                        SuggestionType.EXECUTE_START,
+                        SuggestionType.DISABLE_BOOT,
+                    ],
+                )
+
     async def shutdown(self, stage: AddonStartup) -> None:
         """Shutdown addons."""
         tasks: list[Addon] = []
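The two hunks above stamp out a fresh resolution issue from the frozen boot_failed_issue template with attr.evolve. A minimal sketch of why evolve is the right tool here, assuming an attrs class shaped roughly like the Supervisor's Issue record (the class below is a stand-in, not the real type):

from uuid import uuid4

import attr


@attr.s(frozen=True, slots=True)
class Issue:
    """Stand-in for the resolution Issue record (assumed shape)."""

    type = attr.ib()
    context = attr.ib()
    # init=False fields are re-created by attr.evolve, so each copy
    # gets a fresh identifier while type/context are carried over.
    uuid = attr.ib(factory=lambda: uuid4().hex, init=False)


template = Issue(type="boot_fail", context="addon")
copy = attr.evolve(template)

assert (copy.type, copy.context) == (template.type, template.context)
assert copy.uuid != template.uuid  # the frozen template stays untouched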
@@ -47,7 +47,7 @@ from ..const import (
     ATTR_JOURNALD,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
-    ATTR_LOCATON,
+    ATTR_LOCATION,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -581,7 +581,7 @@ class AddonModel(JobGroup, ABC):
     @property
     def path_location(self) -> Path:
         """Return path to this add-on."""
-        return Path(self.data[ATTR_LOCATON])
+        return Path(self.data[ATTR_LOCATION])
 
     @property
     def path_icon(self) -> Path:
@@ -46,6 +46,7 @@ def rating_security(addon: AddonModel) -> int:
             privilege in addon.privileged
             for privilege in (
                 Capabilities.BPF,
+                Capabilities.CHECKPOINT_RESTORE,
                 Capabilities.DAC_READ_SEARCH,
                 Capabilities.NET_ADMIN,
                 Capabilities.NET_RAW,
@@ -55,7 +55,7 @@ from ..const import (
     ATTR_KERNEL_MODULES,
     ATTR_LABELS,
     ATTR_LEGACY,
-    ATTR_LOCATON,
+    ATTR_LOCATION,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -483,7 +483,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
     _migrate_addon_config(),
     _SCHEMA_ADDON_CONFIG.extend(
         {
-            vol.Required(ATTR_LOCATON): str,
+            vol.Required(ATTR_LOCATION): str,
             vol.Required(ATTR_REPOSITORY): str,
             vol.Required(ATTR_TRANSLATIONS, default=dict): {
                 str: SCHEMA_ADDON_TRANSLATIONS
@@ -413,6 +413,7 @@ class RestAPI(CoreSysAttributes):
                 # No need to capture HostNotSupportedError to Sentry, the cause
                 # is known and reported to the user using the resolution center.
                 capture_exception(err)
+            kwargs.pop("follow", None)  # Follow is not supported for Docker logs
             return await api_supervisor.logs(*args, **kwargs)
 
         self.webapp.add_routes(
@@ -106,6 +106,7 @@ from ..exceptions import (
     APIAddonNotInstalled,
     APIError,
     APIForbidden,
+    APINotFound,
     PwnedError,
     PwnedSecret,
 )
@@ -161,7 +162,7 @@ class APIAddons(CoreSysAttributes):
 
         addon = self.sys_addons.get(addon_slug)
         if not addon:
-            raise APIError(f"Addon {addon_slug} does not exist")
+            raise APINotFound(f"Addon {addon_slug} does not exist")
         if not isinstance(addon, Addon) or not addon.is_installed:
             raise APIAddonNotInstalled("Addon is not installed")
 
@@ -1,5 +1,7 @@
 """Backups RESTful API."""
 
+from __future__ import annotations
+
 import asyncio
 from collections.abc import Callable
 import errno
@@ -12,8 +14,10 @@ from typing import Any
 from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol
+from voluptuous.humanize import humanize_error
 
 from ..backups.backup import Backup
+from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
 from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
 from ..const import (
     ATTR_ADDONS,
@@ -22,44 +26,81 @@ from ..const import (
     ATTR_CONTENT,
     ATTR_DATE,
     ATTR_DAYS_UNTIL_STALE,
+    ATTR_EXTRA,
+    ATTR_FILENAME,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
     ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
-    ATTR_LOCATON,
+    ATTR_JOB_ID,
+    ATTR_LOCATION,
     ATTR_NAME,
     ATTR_PASSWORD,
+    ATTR_PATH,
     ATTR_PROTECTED,
     ATTR_REPOSITORIES,
     ATTR_SIZE,
+    ATTR_SIZE_BYTES,
     ATTR_SLUG,
     ATTR_SUPERVISOR_VERSION,
     ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
+    REQUEST_FROM,
     BusEvent,
     CoreState,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIError, APIForbidden, APINotFound
 from ..jobs import JobSchedulerOptions
 from ..mounts.const import MountUsage
 from ..resolution.const import UnhealthyReason
-from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
+from .const import (
+    ATTR_ADDITIONAL_LOCATIONS,
+    ATTR_BACKGROUND,
+    ATTR_LOCATION_ATTRIBUTES,
+    ATTR_LOCATIONS,
+    CONTENT_TYPE_TAR,
+)
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
+ALL_ADDONS_FLAG = "ALL"
+
+LOCATION_LOCAL = ".local"
+
 RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
+RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")
 
 # Backwards compatible
 # Remove: 2022.08
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
 
+
+def _ensure_list(item: Any) -> list:
+    """Ensure value is a list."""
+    if not isinstance(item, list):
+        return [item]
+    return item
+
+
+def _convert_local_location(item: str | None) -> str | None:
+    """Convert local location value."""
+    if item in {LOCATION_LOCAL, ""}:
+        return None
+    return item
+
+
 # pylint: disable=no-value-for-parameter
+SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
+SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
+SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
+
 SCHEMA_RESTORE_FULL = vol.Schema(
     {
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
         vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
+        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
     }
 )
 
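A quick check of the location normalization the hunk above introduces: ".local" and the empty string are aliases for the default backup folder and collapse to None, while the list schema accepts either a single value or a list. The helpers are copied verbatim from the hunk; only the assertions are added here as a sketch:

import voluptuous as vol

LOCATION_LOCAL = ".local"


def _ensure_list(item):
    """Ensure value is a list."""
    if not isinstance(item, list):
        return [item]
    return item


def _convert_local_location(item):
    """Convert local location value."""
    if item in {LOCATION_LOCAL, ""}:
        return None
    return item


SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())

assert SCHEMA_LOCATION(".local") is None
assert SCHEMA_LOCATION_LIST("my_nas") == ["my_nas"]
assert SCHEMA_LOCATION_LIST([".local", "my_nas"]) == [None, "my_nas"]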
@@ -67,40 +108,36 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
     {
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
         vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
-        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
     }
 )
 
 SCHEMA_BACKUP_FULL = vol.Schema(
     {
         vol.Optional(ATTR_NAME): str,
+        vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
-        vol.Optional(ATTR_LOCATON): vol.Maybe(str),
+        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
         vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
         vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
+        vol.Optional(ATTR_EXTRA): dict,
     }
 )
 
 SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
     {
-        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
-        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
+        vol.Optional(ATTR_ADDONS): vol.Or(
+            ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
+        ),
+        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
     }
 )
 
-SCHEMA_OPTIONS = vol.Schema(
-    {
-        vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
-    }
-)
-
-SCHEMA_FREEZE = vol.Schema(
-    {
-        vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
-    }
-)
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
+SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
+SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})
 
 
 class APIBackups(CoreSysAttributes):
@@ -110,9 +147,19 @@ class APIBackups(CoreSysAttributes):
         """Return backup, throw an exception if it doesn't exist."""
         backup = self.sys_backups.get(request.match_info.get("slug"))
         if not backup:
-            raise APIError("Backup does not exist")
+            raise APINotFound("Backup does not exist")
         return backup
 
+    def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
+        """Make location attributes dictionary."""
+        return {
+            loc if loc else LOCATION_LOCAL: {
+                ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED],
+                ATTR_SIZE_BYTES: backup.all_locations[loc][ATTR_SIZE_BYTES],
+            }
+            for loc in backup.locations
+        }
+
     def _list_backups(self):
         """Return list of backups."""
         return [
@@ -122,8 +169,11 @@ class APIBackups(CoreSysAttributes):
                 ATTR_DATE: backup.date,
                 ATTR_TYPE: backup.sys_type,
                 ATTR_SIZE: backup.size,
-                ATTR_LOCATON: backup.location,
+                ATTR_SIZE_BYTES: backup.size_bytes,
+                ATTR_LOCATION: backup.location,
+                ATTR_LOCATIONS: backup.locations,
                 ATTR_PROTECTED: backup.protected,
+                ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
                 ATTR_COMPRESSED: backup.compressed,
                 ATTR_CONTENT: {
                     ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -132,6 +182,7 @@ class APIBackups(CoreSysAttributes):
                 },
             }
             for backup in self.sys_backups.list_backups
+            if backup.location != LOCATION_CLOUD_BACKUP
         ]
 
     @api_process
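The _make_location_attributes helper added above keys the default (None) location as ".local" in the response payload. A standalone illustration with made-up sizes, where plain dicts stand in for the backup object:

LOCATION_LOCAL = ".local"

# Assumed shape: one entry per storage location, None meaning the local folder.
all_locations = {
    None: {"protected": False, "size_bytes": 1_048_576},
    "my_nas": {"protected": True, "size_bytes": 1_048_576},
}

location_attributes = {
    loc if loc else LOCATION_LOCAL: {
        "protected": all_locations[loc]["protected"],
        "size_bytes": all_locations[loc]["size_bytes"],
    }
    for loc in all_locations
}

assert set(location_attributes) == {".local", "my_nas"}
assert location_attributes[".local"]["protected"] is False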
@@ -191,30 +242,53 @@ class APIBackups(CoreSysAttributes):
             ATTR_NAME: backup.name,
             ATTR_DATE: backup.date,
             ATTR_SIZE: backup.size,
+            ATTR_SIZE_BYTES: backup.size_bytes,
             ATTR_COMPRESSED: backup.compressed,
             ATTR_PROTECTED: backup.protected,
+            ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
             ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
             ATTR_HOMEASSISTANT: backup.homeassistant_version,
-            ATTR_LOCATON: backup.location,
+            ATTR_LOCATION: backup.location,
+            ATTR_LOCATIONS: backup.locations,
             ATTR_ADDONS: data_addons,
             ATTR_REPOSITORIES: backup.repositories,
             ATTR_FOLDERS: backup.folders,
             ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
+            ATTR_EXTRA: backup.extra,
         }
 
-    def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
-        """Change location field to mount if necessary."""
-        if not body.get(ATTR_LOCATON):
-            return body
+    def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
+        """Convert a single location to a mount if possible."""
+        if not location or location == LOCATION_CLOUD_BACKUP:
+            return location
 
-        body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
-        if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
+        mount = self.sys_mounts.get(location)
+        if mount.usage != MountUsage.BACKUP:
             raise APIError(
-                f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
+                f"Mount {mount.name} is not used for backups, cannot backup to there"
             )
+
+        return mount
+
+    def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
+        """Change location field to mount if necessary."""
+        body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
         return body
 
+    def _validate_cloud_backup_location(
+        self, request: web.Request, location: list[str | None] | str | None
+    ) -> None:
+        """Cloud backup location is only available to Home Assistant."""
+        if not isinstance(location, list):
+            location = [location]
+        if (
+            LOCATION_CLOUD_BACKUP in location
+            and request.get(REQUEST_FROM) != self.sys_homeassistant
+        ):
+            raise APIForbidden(
+                f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
+            )
+
     async def _background_backup_task(
         self, backup_method: Callable, *args, **kwargs
     ) -> tuple[asyncio.Task, str]:
@@ -234,24 +308,42 @@ class APIBackups(CoreSysAttributes):
             BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
         )
         try:
-            await asyncio.wait(
+            event_task = self.sys_create_task(event.wait())
+            _, pending = await asyncio.wait(
                 (
                     backup_task,
-                    self.sys_create_task(event.wait()),
+                    event_task,
                 ),
                 return_when=asyncio.FIRST_COMPLETED,
             )
+            # It seems backup returned early (error or something), make sure to cancel
+            # the event task to avoid "Task was destroyed but it is pending!" errors.
+            if event_task in pending:
+                event_task.cancel()
             return (backup_task, job.uuid)
         finally:
             self.sys_bus.remove_listener(listener)
 
     @api_process
-    async def backup_full(self, request):
+    async def backup_full(self, request: web.Request):
         """Create full backup."""
         body = await api_validate(SCHEMA_BACKUP_FULL, request)
+        locations: list[LOCATION_TYPE] | None = None
+
+        if ATTR_LOCATION in body:
+            location_names: list[str | None] = body.pop(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
+
+            locations = [
+                self._location_to_mount(location) for location in location_names
+            ]
+            body[ATTR_LOCATION] = locations.pop(0)
+            if locations:
+                body[ATTR_ADDITIONAL_LOCATIONS] = locations
+
         background = body.pop(ATTR_BACKGROUND)
         backup_task, job_id = await self._background_backup_task(
-            self.sys_backups.do_backup_full, **self._location_to_mount(body)
+            self.sys_backups.do_backup_full, **body
        )
 
         if background and not backup_task.done():
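The wait/cancel dance at the top of the hunk above is a general asyncio pattern: race a work task against an event and cancel the event waiter if the work finishes first, so no orphaned task outlives the request. A self-contained sketch:

import asyncio


async def race(work: asyncio.Task, event: asyncio.Event) -> None:
    """Wait until the work finishes or the event fires, leaving no orphans."""
    event_task = asyncio.create_task(event.wait())
    _, pending = await asyncio.wait(
        (work, event_task), return_when=asyncio.FIRST_COMPLETED
    )
    if event_task in pending:  # the work won the race
        event_task.cancel()


async def main() -> None:
    event = asyncio.Event()
    work = asyncio.create_task(asyncio.sleep(0.01))
    await race(work, event)  # the event waiter is cancelled, not leaked


asyncio.run(main())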
@@ -266,12 +358,28 @@ class APIBackups(CoreSysAttributes):
         )
 
     @api_process
-    async def backup_partial(self, request):
+    async def backup_partial(self, request: web.Request):
         """Create a partial backup."""
         body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
+        locations: list[LOCATION_TYPE] | None = None
+
+        if ATTR_LOCATION in body:
+            location_names: list[str | None] = body.pop(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
+
+            locations = [
+                self._location_to_mount(location) for location in location_names
+            ]
+            body[ATTR_LOCATION] = locations.pop(0)
+            if locations:
+                body[ATTR_ADDITIONAL_LOCATIONS] = locations
+
+        if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
+            body[ATTR_ADDONS] = list(self.sys_addons.local)
+
         background = body.pop(ATTR_BACKGROUND)
         backup_task, job_id = await self._background_backup_task(
-            self.sys_backups.do_backup_partial, **self._location_to_mount(body)
+            self.sys_backups.do_backup_partial, **body
         )
 
         if background and not backup_task.done():
@@ -286,10 +394,13 @@ class APIBackups(CoreSysAttributes):
         )
 
     @api_process
-    async def restore_full(self, request):
+    async def restore_full(self, request: web.Request):
         """Full restore of a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
+        self._validate_cloud_backup_location(
+            request, body.get(ATTR_LOCATION, backup.location)
+        )
         background = body.pop(ATTR_BACKGROUND)
         restore_task, job_id = await self._background_backup_task(
             self.sys_backups.do_restore_full, backup, **body
@@ -303,10 +414,13 @@ class APIBackups(CoreSysAttributes):
         )
 
     @api_process
-    async def restore_partial(self, request):
+    async def restore_partial(self, request: web.Request):
         """Partial restore a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
+        self._validate_cloud_backup_location(
+            request, body.get(ATTR_LOCATION, backup.location)
+        )
         background = body.pop(ATTR_BACKGROUND)
         restore_task, job_id = await self._background_backup_task(
             self.sys_backups.do_restore_partial, backup, **body
@@ -320,38 +434,91 @@ class APIBackups(CoreSysAttributes):
         )
 
     @api_process
-    async def freeze(self, request):
+    async def freeze(self, request: web.Request):
         """Initiate manual freeze for external backup."""
         body = await api_validate(SCHEMA_FREEZE, request)
         await asyncio.shield(self.sys_backups.freeze_all(**body))
 
     @api_process
-    async def thaw(self, request):
+    async def thaw(self, request: web.Request):
         """Begin thaw after manual freeze."""
         await self.sys_backups.thaw_all()
 
     @api_process
-    async def remove(self, request):
+    async def remove(self, request: web.Request):
         """Remove a backup."""
         backup = self._extract_slug(request)
-        return self.sys_backups.remove(backup)
+        body = await api_validate(SCHEMA_REMOVE, request)
+        locations: list[LOCATION_TYPE] | None = None
 
-    async def download(self, request):
+        if ATTR_LOCATION in body:
+            self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
+            locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
+        else:
+            self._validate_cloud_backup_location(request, backup.location)
+
+        await self.sys_backups.remove(backup, locations=locations)
+
+    @api_process
+    async def download(self, request: web.Request):
         """Download a backup file."""
         backup = self._extract_slug(request)
+        # Query will give us '' for /backups, convert value to None
+        location = _convert_local_location(
+            request.query.get(ATTR_LOCATION, backup.location)
+        )
+        self._validate_cloud_backup_location(request, location)
+        if location not in backup.all_locations:
+            raise APIError(f"Backup {backup.slug} is not in location {location}")
+
         _LOGGER.info("Downloading backup %s", backup.slug)
-        response = web.FileResponse(backup.tarfile)
+        filename = backup.all_locations[location][ATTR_PATH]
+        # If the file is missing, return 404 and trigger reload of location
+        if not filename.is_file():
+            self.sys_create_task(self.sys_backups.reload(location))
+            return web.Response(status=404)
+
+        response = web.FileResponse(filename)
         response.content_type = CONTENT_TYPE_TAR
+
+        download_filename = filename.name
+        if download_filename == f"{backup.slug}.tar":
+            download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
         response.headers[CONTENT_DISPOSITION] = (
-            f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
+            f"attachment; filename={download_filename}"
         )
         return response
 
     @api_process
-    async def upload(self, request):
+    async def upload(self, request: web.Request):
         """Upload a backup file."""
-        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
+        location: LOCATION_TYPE = None
+        locations: list[LOCATION_TYPE] | None = None
+        tmp_path = self.sys_config.path_tmp
+        if ATTR_LOCATION in request.query:
+            location_names: list[str] = request.query.getall(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
+            # Convert empty string to None if necessary
+            locations = [
+                self._location_to_mount(location)
+                if _convert_local_location(location)
+                else None
+                for location in location_names
+            ]
+            location = locations.pop(0)
+
+            if location and location != LOCATION_CLOUD_BACKUP:
+                tmp_path = location.local_where
+
+        filename: str | None = None
+        if ATTR_FILENAME in request.query:
+            filename = request.query.get(ATTR_FILENAME)
+            try:
+                vol.Match(RE_BACKUP_FILENAME)(filename)
+            except vol.Invalid as ex:
+                raise APIError(humanize_error(filename, ex)) from None
+
+        with TemporaryDirectory(dir=tmp_path.as_posix()) as temp_dir:
             tar_file = Path(temp_dir, "backup.tar")
             reader = await request.multipart()
             contents = await reader.next()
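The download filename rule in the hunk above: when the file on disk still carries its default "<slug>.tar" name, the Content-Disposition filename falls back to a slugified form of the human-readable backup name. With made-up values:

import re

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")

slug, name = "c3a5b9f2", "My NAS backup!"  # hypothetical backup
download_filename = "c3a5b9f2.tar"  # filename.name as stored on disk

if download_filename == f"{slug}.tar":
    # Collapse every run of non-alphanumerics in the display name to "_".
    download_filename = f"{RE_SLUGIFY_NAME.sub('_', name)}.tar"

assert download_filename == "My_NAS_backup_.tar"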
@@ -364,7 +531,10 @@ class APIBackups(CoreSysAttributes):
                         backup.write(chunk)
 
             except OSError as err:
-                if err.errno == errno.EBADMSG:
+                if err.errno == errno.EBADMSG and location in {
+                    LOCATION_CLOUD_BACKUP,
+                    None,
+                }:
                     self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
                 _LOGGER.error("Can't write new backup file: %s", err)
                 return False
@@ -372,7 +542,14 @@ class APIBackups(CoreSysAttributes):
             except asyncio.CancelledError:
                 return False
 
-            backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
+            backup = await asyncio.shield(
+                self.sys_backups.import_backup(
+                    tar_file,
+                    filename,
+                    location=location,
+                    additional_locations=locations,
+                )
+            )
 
             if backup:
                 return {ATTR_SLUG: backup.slug}
@@ -12,6 +12,7 @@ CONTENT_TYPE_X_LOG = "text/x-log"
 
 COOKIE_INGRESS = "ingress_session"
 
+ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
 ATTR_AGENT_VERSION = "agent_version"
 ATTR_APPARMOR_VERSION = "apparmor_version"
 ATTR_ATTRIBUTES = "attributes"
@@ -42,11 +43,12 @@ ATTR_GROUP_IDS = "group_ids"
 ATTR_IDENTIFIERS = "identifiers"
 ATTR_IS_ACTIVE = "is_active"
 ATTR_IS_OWNER = "is_owner"
-ATTR_JOB_ID = "job_id"
 ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
 ATTR_LOCAL_ONLY = "local_only"
+ATTR_LOCATION_ATTRIBUTES = "location_attributes"
+ATTR_LOCATIONS = "locations"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
@@ -68,6 +70,7 @@ ATTR_UPDATE_TYPE = "update_type"
 ATTR_USAGE = "usage"
 ATTR_USE_NTP = "use_ntp"
 ATTR_USERS = "users"
+ATTR_USER_PATH = "user_path"
 ATTR_VENDOR = "vendor"
 ATTR_VIRTUALIZATION = "virtualization"
 
@@ -16,7 +16,7 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APIForbidden
+from ..exceptions import APIForbidden, APINotFound
 from .utils import api_process, api_validate, require_home_assistant
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ class APIDiscovery(CoreSysAttributes):
         """Extract discovery message from URL."""
         message = self.sys_discovery.get(request.match_info.get("uuid"))
         if not message:
-            raise APIError("Discovery message not found")
+            raise APINotFound("Discovery message not found")
         return message
 
     @api_process
@@ -16,6 +16,7 @@ from ..const import (
     ATTR_VERSION,
 )
 from ..coresys import CoreSysAttributes
+from ..exceptions import APINotFound
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -58,6 +59,9 @@ class APIDocker(CoreSysAttributes):
     async def remove_registry(self, request: web.Request):
         """Delete a docker registry."""
         hostname = request.match_info.get(ATTR_HOSTNAME)
+        if hostname not in self.sys_docker.config.registries:
+            raise APINotFound(f"Hostname {hostname} does not exist in registries")
+
         del self.sys_docker.config.registries[hostname]
         self.sys_docker.config.save_data()
 
@@ -4,7 +4,7 @@ import asyncio
 from contextlib import suppress
 import logging
 
-from aiohttp import web
+from aiohttp import ClientConnectionResetError, web
 from aiohttp.hdrs import ACCEPT, RANGE
 import voluptuous as vol
 from voluptuous.error import CoerceInvalid
@@ -239,12 +239,12 @@ class APIHost(CoreSysAttributes):
             # return 2 lines at minimum.
             lines = max(2, lines)
             # entries=cursor[[:num_skip]:num_entries]
-            range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
+            range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
         elif RANGE in request.headers:
            range_header = request.headers.get(RANGE)
         else:
             range_header = (
-                f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
+                f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
             )
 
         async with self.sys_host.logs.journald_logs(
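The journald Range header touched above follows the documented entries=cursor[[:num_skip]:num_entries] format; the change is purely cosmetic spacing, the computed value is unchanged. For example, 100 lines without follow:

lines, follow = 100, False
# Start 99 entries back from the tail and return exactly 100 entries.
range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
assert range_header == "entries=:-99:100"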
@@ -258,9 +258,13 @@ class APIHost(CoreSysAttributes):
                 if not headers_returned:
                     if cursor:
                         response.headers["X-First-Cursor"] = cursor
+                    response.headers["X-Accel-Buffering"] = "no"
                     await response.prepare(request)
                     headers_returned = True
-                await response.write(line.encode("utf-8") + b"\n")
+                # When client closes the connection while reading busy logs, we
+                # sometimes get this exception. It should be safe to ignore it.
+                with suppress(ClientConnectionResetError):
+                    await response.write(line.encode("utf-8") + b"\n")
         except ConnectionResetError as ex:
             raise APIError(
                 "Connection reset when trying to fetch data from systemd-journald."
@@ -277,6 +277,7 @@ class APIIngress(CoreSysAttributes):
         response.content_type = content_type
 
         try:
+            response.headers["X-Accel-Buffering"] = "no"
             await response.prepare(request)
             async for data in result.content.iter_chunked(4096):
                 await response.write(data)
@@ -7,7 +7,7 @@ from aiohttp import web
 import voluptuous as vol
 
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIError, APINotFound, JobNotFound
 from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
 from .const import ATTR_JOBS
@@ -23,10 +23,24 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""
 
+    def _extract_job(self, request: web.Request) -> SupervisorJob:
+        """Extract job from request or raise."""
+        try:
+            return self.sys_jobs.get_job(request.match_info.get("uuid"))
+        except JobNotFound:
+            raise APINotFound("Job does not exist") from None
+
     def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
-        """Return current job tree."""
+        """Return current job tree.
+
+        Jobs are added to cache as they are created so by default they are in oldest to newest.
+        This is correct ordering for child jobs as it makes logical sense to present those in
+        the order they occurred within the parent. For the list as a whole, sort from newest
+        to oldest as its likely any client is most interested in the newer ones.
+        """
+        # Initially sort oldest to newest so all child lists end up in correct order
         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
-        for job in self.sys_jobs.jobs:
+        for job in sorted(self.sys_jobs.jobs):
             if job.internal:
                 continue
 
@@ -35,11 +49,15 @@ class APIJobs(CoreSysAttributes):
             else:
                 jobs_by_parent[job.parent_id].append(job)
 
+        # After parent-child organization, sort the root jobs only from newest to oldest
         job_list: list[dict[str, Any]] = []
         queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
             [(job_list, start)]
             if start
-            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
+            else [
+                (job_list, job)
+                for job in sorted(jobs_by_parent.get(None, []), reverse=True)
+            ]
         )
 
         while queue:
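The ordering rule the new docstring describes — children oldest-to-newest inside their parent, root jobs newest-to-oldest — in miniature, with plain tuples standing in for SupervisorJob:

# (created, uuid, parent_uuid), already in creation order
jobs = [(1, "a", None), (2, "a1", "a"), (3, "b", None), (4, "b1", "b")]

by_parent: dict = {}
for job in sorted(jobs):  # oldest to newest keeps child lists in order
    by_parent.setdefault(job[2], []).append(job)

# Only the root jobs are flipped to newest-first for presentation.
roots = sorted(by_parent.get(None, []), reverse=True)

assert [j[1] for j in roots] == ["b", "a"]
assert [j[1] for j in by_parent["a"]] == ["a1"]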
@@ -86,13 +104,13 @@ class APIJobs(CoreSysAttributes):
     @api_process
     async def job_info(self, request: web.Request) -> dict[str, Any]:
         """Get details of a job by ID."""
-        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+        job = self._extract_job(request)
         return self._list_jobs(job)[0]
 
     @api_process
     async def remove_job(self, request: web.Request) -> None:
         """Remove a completed job."""
-        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+        job = self._extract_job(request)
 
         if not job.done:
             raise APIError(f"Job {job.uuid} is not done!")
@@ -7,11 +7,11 @@ import voluptuous as vol
 
 from ..const import ATTR_NAME, ATTR_STATE
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIError, APINotFound
 from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
 from ..mounts.mount import Mount
 from ..mounts.validate import SCHEMA_MOUNT_CONFIG
-from .const import ATTR_MOUNTS
+from .const import ATTR_MOUNTS, ATTR_USER_PATH
 from .utils import api_process, api_validate
 
 SCHEMA_OPTIONS = vol.Schema(
@@ -24,6 +24,13 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIMounts(CoreSysAttributes):
     """Handle REST API for mounting options."""
 
+    def _extract_mount(self, request: web.Request) -> Mount:
+        """Extract mount from request or raise."""
+        name = request.match_info.get("mount")
+        if name not in self.sys_mounts:
+            raise APINotFound(f"No mount exists with name {name}")
+        return self.sys_mounts.get(name)
+
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return MountManager info."""
@@ -32,7 +39,13 @@ class APIMounts(CoreSysAttributes):
             if self.sys_mounts.default_backup_mount
             else None,
             ATTR_MOUNTS: [
-                mount.to_dict() | {ATTR_STATE: mount.state}
+                mount.to_dict()
+                | {
+                    ATTR_STATE: mount.state,
+                    ATTR_USER_PATH: mount.container_where.as_posix()
+                    if mount.container_where
+                    else None,
+                }
                 for mount in self.sys_mounts.mounts
             ],
         }
@@ -79,15 +92,13 @@ class APIMounts(CoreSysAttributes):
     @api_process
     async def update_mount(self, request: web.Request) -> None:
         """Update an existing mount in supervisor."""
-        name = request.match_info.get("mount")
+        current = self._extract_mount(request)
         name_schema = vol.Schema(
-            {vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
+            {vol.Optional(ATTR_NAME, default=current.name): current.name},
+            extra=vol.ALLOW_EXTRA,
         )
         body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
 
-        if name not in self.sys_mounts:
-            raise APIError(f"No mount exists with name {name}")
-
         mount = Mount.from_dict(self.coresys, body)
         await self.sys_mounts.create_mount(mount)
 
@@ -104,8 +115,8 @@ class APIMounts(CoreSysAttributes):
     @api_process
     async def delete_mount(self, request: web.Request) -> None:
         """Delete an existing mount in supervisor."""
-        name = request.match_info.get("mount")
-        mount = await self.sys_mounts.remove_mount(name)
+        current = self._extract_mount(request)
+        mount = await self.sys_mounts.remove_mount(current.name)
 
         # If it was a backup mount, reload backups
         if mount.usage == MountUsage.BACKUP:
@@ -116,9 +127,9 @@ class APIMounts(CoreSysAttributes):
     @api_process
     async def reload_mount(self, request: web.Request) -> None:
         """Reload an existing mount in supervisor."""
-        name = request.match_info.get("mount")
-        await self.sys_mounts.reload_mount(name)
+        mount = self._extract_mount(request)
+        await self.sys_mounts.reload_mount(mount.name)
 
         # If it's a backup mount, reload backups
-        if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
+        if mount.usage == MountUsage.BACKUP:
             self.sys_create_task(self.sys_backups.reload())
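The mounts info entry above builds each item by merging the serialized mount with computed fields via the dict union operator; a standalone illustration with placeholder values (no Supervisor types involved):

from pathlib import Path

mount_dict = {"name": "media", "usage": "media"}  # hypothetical to_dict() output
container_where: Path | None = Path("/media")  # where add-ons see the mount

entry = mount_dict | {
    "state": "active",
    "user_path": container_where.as_posix() if container_where else None,
}

assert entry["user_path"] == "/media"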
@@ -42,7 +42,7 @@ from ..const import (
     DOCKER_NETWORK_MASK,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, HostNetworkNotFound
+from ..exceptions import APIError, APINotFound, HostNetworkNotFound
 from ..host.configuration import (
     AccessPoint,
     Interface,
@@ -167,7 +167,7 @@ class APINetwork(CoreSysAttributes):
             except HostNetworkNotFound:
                 pass
 
-        raise APIError(f"Interface {name} does not exist") from None
+        raise APINotFound(f"Interface {name} does not exist") from None
 
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
@@ -1 +1 @@
-!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
+!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([4-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[89]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.9ac99222ee42fbb3.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.85ccafe1fda9d9a5.js")}else d("/api/hassio/app/frontend_es5/entrypoint.85ccafe1fda9d9a5.js")}()
BIN
supervisor/api/panel/entrypoint.js.br
Normal file
BIN
supervisor/api/panel/entrypoint.js.br
Normal file
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
|||||||
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
|
|
||||||
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1081.e647cbe586ff9dd0.js.br
Normal file
BIN
supervisor/api/panel/frontend_es5/1081.e647cbe586ff9dd0.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1081.e647cbe586ff9dd0.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1081.e647cbe586ff9dd0.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"1081.e647cbe586ff9dd0.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250221.0/src/components/ha-button-toggle-group.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250221.0/src/components/ha-selector/ha-selector-button-toggle.ts"],"names":["_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","queryAll","html","_t","_","this","buttons","map","button","iconPath","_t2","label","active","_handleClick","_t3","styleMap","width","fullWidth","length","dense","_this$_buttons","_buttons","forEach","async","updateComplete","shadowRoot","querySelector","style","margin","ev","currentTarget","fireEvent","static","css","_t4","LitElement","HaButtonToggleSelector","_this$selector$button","_this$selector$button2","_this$selector$button3","options","selector","button_toggle","option","translationKey","translation_key","localizeValue","localizedLabel","sort","a","b","caseInsensitiveStringCompare","hass","locale","language","toggleButtons","item","_valueChanged","_ev$detail","_this$value","stopPropagation","detail","target","disabled","undefined"],"mappings":"sXAWgCA,EAAAA,EAAAA,GAAA,EAD/BC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAkIvC,OAAAC,EAlID,cACgCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC7BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,UAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,UAAW,aAAcG,KAAMC,WAAUH,IAAA,YAAAC,KAAAA,GAAA,OAClC,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEvBC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,QAAAC,KAAAA,GAAA,OAAgB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEhDO,EAAAA,EAAAA,IAAS,eAAaJ,IAAA,WAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEvB,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,uBAELC,KAAKC,QAAQC,KAAKC,GAClBA,EAAOC,UACHP,EAAAA,EAAAA,IAAIQ,IAAAA,EAAAN,CAAA,2GACOI,EAAOG,MACRH,EAAOC,SACND,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,eAEhBX,EAAAA,EAAAA,IAAIY,IAAAA,EAAAV,CAAA,iHACMW,EAAAA,EAAAA,GAAS,CACfC,MAAOX,KAAKY,UACL,IAAMZ,KAAKC,QAAQY,OAAtB,IACA,YAGGb,KAAKc,MACLX,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,aACXL,EAAOG,SAKxB,GAAC,CAAAlB,KAAA,SAAAI,IAAA,UAAAC,MAED,WAAoB,IAAAsB,EAEL,QAAbA,EAAAf,KAAKgB,gBAAQ,IAAAD,GAAbA,EAAeE,SAAQC,gBACff,EAAOgB,eAEXhB,EAAOiB,WAAYC,cAAc,UACjCC,MAAMC,OAAS,GAAG,GAExB,GAAC,CAAAnC,KAAA,SAAAI,IAAA,eAAAC,MAED,SAAqB+B,GACnBxB,KAAKO,OAASiB,EAAGC,cAAchC,OAC/BiC,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAAEP,MAAOO,KAAKO,QACjD,GAAC,CAAAnB,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGC,IAAAA,EAAA9B,CAAA,u0CAzDoB+B,EAAAA,I,MCD5BC,GAAsBnD,EAAAA,EAAAA,GAAA,EADlCC,EAAAA,EAAAA,IAAc,+BAA4B,SAAAC,EAAAC,GA4F1C,OAAAC,EA5FD,cACmCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAChCC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAG9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAuC,EAAAC,EAAAC,EACjB,MAAMC,GACuB,QAA3BH,EAAAhC,KAAKoC,SAASC,qBAAa,IAAAL,GAAS,QAATA
,EAA3BA,EAA6BG,eAAO,IAAAH,OAAA,EAApCA,EAAsC9B,KAAKoC,GACvB,iBAAXA,EACFA,EACA,CAAE7C,MAAO6C,EAAQhC,MAAOgC,OAC1B,GAEDC,EAA4C,QAA9BN,EAAGjC,KAAKoC,SAASC,qBAAa,IAAAJ,OAAA,EAA3BA,EAA6BO,gBAEhDxC,KAAKyC,eAAiBF,GACxBJ,EAAQlB,SAASqB,IACf,MAAMI,EAAiB1C,KAAKyC,cAC1B,GAAGF,aAA0BD,EAAO7C,SAElCiD,IACFJ,EAAOhC,MAAQoC,EACjB,IAI2B,QAA/BR,EAAIlC,KAAKoC,SAASC,qBAAa,IAAAH,GAA3BA,EAA6BS,MAC/BR,EAAQQ,MAAK,CAACC,EAAGC,KACfC,EAAAA,EAAAA,GACEF,EAAEtC,MACFuC,EAAEvC,MACFN,KAAK+C,KAAKC,OAAOC,YAKvB,MAAMC,EAAgCf,EAAQjC,KAAKiD,IAAkB,CACnE7C,MAAO6C,EAAK7C,MACZb,MAAO0D,EAAK1D,UAGd,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,iHACPC,KAAKM,MAEM4C,EACDlD,KAAKP,MACEO,KAAKoD,cAG5B,GAAC,CAAAhE,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsB+B,GAAI,IAAA6B,EAAAC,EACxB9B,EAAG+B,kBAEH,MAAM9D,GAAiB,QAAT4D,EAAA7B,EAAGgC,cAAM,IAAAH,OAAA,EAATA,EAAW5D,QAAS+B,EAAGiC,OAAOhE,MACxCO,KAAK0D,eAAsBC,IAAVlE,GAAuBA,KAAqB,QAAhB6D,EAAMtD,KAAKP,aAAK,IAAA6D,EAAAA,EAAI,MAGrE5B,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAC/BP,MAAOA,GAEX,GAAC,CAAAL,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGvB,IAAAA,EAAAN,CAAA,wLA5EuB+B,EAAAA,G"}
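For orientation: the source map above points back at ha-button-toggle-group.ts and ha-selector-button-toggle.ts (its "sources" array). The option handling encoded in its "names" array can be read out of the minified chunk as roughly the following TypeScript sketch; the function name buildToggleButtons and the localeCompare-based comparison helper are stand-ins of mine, not the frontend's actual source:

// Sketch: option normalisation, localisation, and sorting as performed in
// ha-selector-button-toggle's render(), reconstructed from the source map above.
// buildToggleButtons and caseInsensitiveStringCompare are hypothetical stand-ins.
type SelectOption = { value: string; label: string };

const caseInsensitiveStringCompare = (a: string, b: string, language?: string) =>
  a.toLowerCase().localeCompare(b.toLowerCase(), language);

function buildToggleButtons(
  rawOptions: Array<string | SelectOption>,
  translationKey: string | undefined,
  localizeValue: ((key: string) => string) | undefined,
  sort: boolean,
  language: string
): SelectOption[] {
  // Plain string options become { value, label } pairs.
  const options: SelectOption[] = rawOptions.map((option) =>
    typeof option === "object" ? option : { value: option, label: option }
  );

  // If the selector carries a translation_key, labels are localized via
  // "<translation_key>.options.<value>".
  if (localizeValue && translationKey) {
    for (const option of options) {
      const localizedLabel = localizeValue(`${translationKey}.options.${option.value}`);
      if (localizedLabel) {
        option.label = localizedLabel;
      }
    }
  }

  // Optional case-insensitive, locale-aware sort by label.
  if (sort) {
    options.sort((a, b) => caseInsensitiveStringCompare(a.label, b.label, language));
  }

  // The component then renders these entries as toggle buttons.
  return options.map((item) => ({ label: item.label, value: item.value }));
}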
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1121.6a80ad1fbfcedf85.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1121.6a80ad1fbfcedf85.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1173.df00e6361fed8e6c.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1173.df00e6361fed8e6c.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/12.ffa1bdc0a98802fa.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map
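Those two lines are the entire chunk: the minified ha-selector-navigation component plus its source-map pointer. Combined with the map further below, it reconstructs to roughly the following sketch (the lit decorator imports are real; the HomeAssistant/NavigationSelector types and the fireEvent helper are minimal stand-ins for the frontend's own):

import { html, LitElement } from "lit";
import { customElement, property } from "lit/decorators.js";

// Minimal stand-ins; the real types and helpers live in the frontend repo.
type HomeAssistant = Record<string, unknown>;
type NavigationSelector = { navigation: Record<string, never> };
const fireEvent = (node: HTMLElement, type: string, detail?: unknown) =>
  node.dispatchEvent(new CustomEvent(type, { detail, bubbles: true, composed: true }));

@customElement("ha-selector-navigation")
export class HaNavigationSelector extends LitElement {
  @property({ attribute: false }) public hass!: HomeAssistant;
  @property({ attribute: false }) public selector!: NavigationSelector;
  @property() public value?: string;
  @property() public label?: string;
  @property() public helper?: string;
  @property({ type: Boolean, reflect: true }) public disabled = false;
  @property({ type: Boolean }) public required = true;

  protected render() {
    // Thin wrapper: all real work happens inside <ha-navigation-picker>.
    return html`
      <ha-navigation-picker
        .hass=${this.hass}
        .label=${this.label}
        .value=${this.value}
        .required=${this.required}
        .disabled=${this.disabled}
        .helper=${this.helper}
        @value-changed=${this._valueChanged}
      ></ha-navigation-picker>
    `;
  }

  private _valueChanged(ev: CustomEvent) {
    // Re-fires the picker's value upward as a bubbling "value-changed" event.
    fireEvent(this, "value-changed", { value: ev.detail.value });
  }
}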
BIN
supervisor/api/panel/frontend_es5/12.ffa1bdc0a98802fa.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/12.ffa1bdc0a98802fa.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250221.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}
@@ -0,0 +1,2 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map
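The 1236 chunk above is not application code: it registers English plural-rule data with the @formatjs Intl.PluralRules polyfill, which is only exercised when the browser lacks a native implementation. The fn it installs encodes the standard CLDR rules for "en"; with a native Intl.PluralRules they behave like this:

// Cardinal plurals for "en" have two categories: "one" and "other".
new Intl.PluralRules("en").select(1); // "one"
new Intl.PluralRules("en").select(2); // "other"

// Ordinals add "two" and "few": 1st, 2nd, 3rd, but 11th-13th fall back to "other".
const ordinal = new Intl.PluralRules("en", { type: "ordinal" });
ordinal.select(1);  // "one"   -> 1st
ordinal.select(2);  // "two"   -> 2nd
ordinal.select(3);  // "few"   -> 3rd
ordinal.select(11); // "other" -> 11th
ordinal.select(21); // "one"   -> 21st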
BIN
supervisor/api/panel/frontend_es5/1236.64ca65d0ea4d76d4.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1236.64ca65d0ea4d76d4.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1258.bb6811ca0567a5d6.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1258.bb6811ca0567a5d6.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1295"],{21393:function(s,n,e){e.r(n)}}]);
BIN
supervisor/api/panel/frontend_es5/1295.d3a5058b570b3a9e.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1295.d3a5058b570b3a9e.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1327.0e87d3390ae69a74.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1327.0e87d3390ae69a74.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1352.7d375e47a6d46f74.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1352.7d375e47a6d46f74.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/137.a19e63f43d6e9cc2.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/137.a19e63f43d6e9cc2.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1390"],{48825:function(t,a,i){i.r(a);var e=i(73577),o=(i(71695),i(40251),i(47021),i(31622),i(57243)),n=i(50778),r=i(27486),s=i(36522),l=(i(73729),i(29073),i(56785)),c=i(74617),d=i(28008);let u,h,m,p=t=>t;const v=(0,r.Z)((()=>[{name:"default_backup_mount",required:!0,selector:{backup_location:{}}}]));(0,e.Z)([(0,n.Mo)("dialog-hassio-backup-location")],(function(t,a){return{F:class extends a{constructor(...a){super(...a),t(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_dialogParams",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_data",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_waiting",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_error",value:void 0},{kind:"method",key:"showDialog",value:async function(t){this._dialogParams=t}},{kind:"method",key:"closeDialog",value:function(){this._data=void 0,this._error=void 0,this._waiting=void 0,this._dialogParams=void 0,(0,s.B)(this,"dialog-closed",{dialog:this.localName})}},{kind:"method",key:"render",value:function(){return this._dialogParams?(0,o.dy)(u||(u=p` <ha-dialog open scrimClickAction escapeKeyAction .heading="${0}" @closed="${0}"> ${0} <ha-form .hass="${0}" .data="${0}" .schema="${0}" .computeLabel="${0}" .computeHelper="${0}" @value-changed="${0}" dialogInitialFocus></ha-form> <mwc-button slot="secondaryAction" @click="${0}" dialogInitialFocus> ${0} </mwc-button> <mwc-button .disabled="${0}" slot="primaryAction" @click="${0}"> ${0} </mwc-button> </ha-dialog> `),this._dialogParams.supervisor.localize("dialog.backup_location.title"),this.closeDialog,this._error?(0,o.dy)(h||(h=p`<ha-alert alert-type="error">${0}</ha-alert>`),this._error):o.Ld,this.hass,this._data,v(),this._computeLabelCallback,this._computeHelperCallback,this._valueChanged,this.closeDialog,this._dialogParams.supervisor.localize("common.cancel"),this._waiting||!this._data,this._changeMount,this._dialogParams.supervisor.localize("common.save")):o.Ld}},{kind:"field",key:"_computeLabelCallback",value(){return t=>this._dialogParams.supervisor.localize(`dialog.backup_location.options.${t.name}.name`)||t.name}},{kind:"field",key:"_computeHelperCallback",value(){return t=>this._dialogParams.supervisor.localize(`dialog.backup_location.options.${t.name}.description`)}},{kind:"method",key:"_valueChanged",value:function(t){const a=t.detail.value.default_backup_mount;this._data={default_backup_mount:"/backup"===a?null:a}}},{kind:"method",key:"_changeMount",value:async function(){if(this._data){this._error=void 0,this._waiting=!0;try{await(0,c.Cl)(this.hass,this._data)}catch(t){return this._error=(0,l.js)(t),void(this._waiting=!1)}this.closeDialog()}}},{kind:"get",static:!0,key:"styles",value:function(){return[d.Qx,d.yu,(0,o.iv)(m||(m=p`.delete-btn{--mdc-theme-primary:var(--error-color)}`))]}}]}}),o.oi)},74617:function(t,a,i){i.d(a,{Cl:()=>r,eX:()=>o,mw:()=>e,rE:()=>n});i(40251);let e=function(t){return t.BIND="bind",t.CIFS="cifs",t.NFS="nfs",t}({}),o=function(t){return t.BACKUP="backup",t.MEDIA="media",t.SHARE="share",t}({});const n=async t=>t.callWS({type:"supervisor/api",endpoint:"/mounts",method:"get",timeout:null}),r=async(t,a)=>t.callWS({type:"supervisor/api",endpoint:"/mounts/options",method:"post",timeout:null,data:a})},30338:function(t,a,i){var e=i(97934),o=i(71998),n=i(4576),r=i(36760);t.exports=function(t,a){a&&"string"==typeof t||o(t);var 
i=r(t);return n(o(void 0!==i?e(i,t):t))}},60933:function(t,a,i){var e=i(40810),o=i(57877),n=i(63983),r=i(12360),s=i(13053),l=i(47645);e({target:"Array",proto:!0},{flatMap:function(t){var a,i=r(this),e=s(i);return n(t),(a=l(i,0)).length=o(a,i,i,e,0,1,t,arguments.length>1?arguments[1]:void 0),a}})},32126:function(t,a,i){i(35709)("flatMap")},25677:function(t,a,i){var e=i(40810),o=i(97934),n=i(63983),r=i(71998),s=i(4576),l=i(30338),c=i(79995),d=i(14181),u=i(92288),h=c((function(){for(var t,a,i=this.iterator,e=this.mapper;;){if(a=this.inner)try{if(!(t=r(o(a.next,a.iterator))).done)return t.value;this.inner=null}catch(n){d(i,"throw",n)}if(t=r(o(this.next,i)),this.done=!!t.done)return;try{this.inner=l(e(t.value,this.counter++),!1)}catch(n){d(i,"throw",n)}}}));e({target:"Iterator",proto:!0,real:!0,forced:u},{flatMap:function(t){return r(this),n(t),new h(s(this),{mapper:t,inner:null})}})},34810:function(t,a,i){i(25677)}}]);
//# sourceMappingURL=1390.f8ddc371bbdbc7c7.js.map
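Besides the dialog-hassio-backup-location element, the 1390 chunk bundles a small mounts API module (74617 in the minified source). Readable equivalents of its two WebSocket helpers follow; the function names are hypothetical, but the type, endpoint, method, and payload fields are taken verbatim from the chunk:

// Hypothetical names for the minified helpers rE (fetch) and Cl (update).
type HomeAssistant = { callWS: <T>(msg: Record<string, unknown>) => Promise<T> };

const fetchSupervisorMounts = async (hass: HomeAssistant) =>
  hass.callWS({ type: "supervisor/api", endpoint: "/mounts", method: "get", timeout: null });

const changeSupervisorMountOptions = async (
  hass: HomeAssistant,
  data: { default_backup_mount: string | null }
) =>
  hass.callWS({
    type: "supervisor/api",
    endpoint: "/mounts/options",
    method: "post",
    timeout: null,
    data,
  });

Note that the dialog's _valueChanged maps the built-in "/backup" location to null before posting, so selecting local backup storage clears default_backup_mount rather than storing a path.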
BIN
supervisor/api/panel/frontend_es5/1390.f8ddc371bbdbc7c7.js.br
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff.