Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-22 23:49:21 +00:00)

Compare commits: 159 commits
Commits in this comparison (SHA1):
c634cc1f34 646725bb08 618c89c4d8 0dc442d0cb 6ae664b448 18b43ce767
f9b474866b 1a76035682 e332f4b2bd ab27fd7b57 12c0faf803 c0a409b25f
2be33a80a7 d684aab207 ec6da7851e eb621f6a2c a1a9c55542 d15a7c27ca
fb46335d16 48e666e1fc ff462ae976 23731d9a6e 30df8ce5c7 951efd6b29
262fd05c6d 2a6fc512e7 bb0d89f8fd e9ccc7ee19 a5103cc329 c24b811180
611963f5dd 0958cd0c06 c406814794 c3459fd32a 2072370ccc 615758a1df
cd10b597dd 50c277137d 99bc201688 0b09eb3659 a6795536ad a46536e9be
c01bed9d97 2f4e06aadf b8249548ae 5f98ab7e3e d195f19fa8 c67d4d7c0b
5aa8028ff5 b71c6c60da 4f272ad4fd 611128c014 cbf73ceaa3 01e24a3e74
10dcf5c12f ebae1e70ee b1ddb917c8 d6c25c4188 170e85396e bf48d48c51
fc646db95f 0769af9383 1f28e6ad93 2dab39bf90 dcd0592d44 7c4b20380e
1d304bd6ff 4ea27f6311 3dc36c3402 bae7fe4184 df030e6209 09d60b4957
004065ae33 854d337dd3 2c5bb3f714 7b63544474 97af1fc66e 32d65722e9
d5f9fcfdc7 ffa524d3a4 9c7de4a6c3 b4e1e3e853 c7f7fbd41a cbddca2658
f4811a0243 024b813865 5919bc2252 8bca34ec6b 8b5e96a8ad 2d908ffcec
c3f7a45d61 97b05c2078 aa9a774939 3388a13693 9957e3dd4c 01c2bd1b0c
2cd7f9d1b0 5fc9484f73 e6dfe83d62 3f88236495 96065ed704 7754424cb8
be842d5e6c c8f184f24c e82cb5da45 a968f6e90a 3eac3a6178 b831dce443
e62324e43f a92058e6fc 29b2de6998 057a048504 29a1e6f68b 702cb4f5be
13c10dbb47 2279c813d0 1b52b2d23b 27ac96f5f9 f87209f66f b670efa47f
c749e21d3f 4f8f28b9f6 2b4f46f6b3 5d6e2eeaac a45789c906 d097044fa8
73778780ef df05c844c0 ebeff31bf6 037e42e894 13db0e5c70 dab75b597c
a1bab8ad08 48c5dd064c fd998155c2 4a3ab4ba8d c76e7a22df d19166bb86
14bc771ba9 8f84eaa096 2fd51c36b8 c473d7ca62 2de5b2f0fb cf30810677
a8dc842f97 38509aa3b8 9be2b3bced ceed1bc318 389aab8d4a 8b7aa7640c
a5cc3cba63 9266062709 bacedd1622 7227f022b1 0ce91f2e25 fdb195cf59
b85936774a bd106be026 e588541fe3
@@ -10,7 +10,7 @@
 "visualstudioexptteam.vscodeintellicode",
 "esbenp.prettier-vscode"
 ],
-"mounts": [ "type=volume,target=/var/lib/docker" ],
+"mounts": ["type=volume,target=/var/lib/docker"],
 "settings": {
 "terminal.integrated.profiles.linux": {
 "zsh": {
@@ -26,7 +26,7 @@
 "python.linting.pylintEnabled": true,
 "python.linting.enabled": true,
 "python.formatting.provider": "black",
-"python.formatting.blackArgs": ["--target-version", "py39"],
+"python.formatting.blackArgs": ["--target-version", "py310"],
 "python.formatting.blackPath": "/usr/local/bin/black",
 "python.linting.banditPath": "/usr/local/bin/bandit",
 "python.linting.flake8Path": "/usr/local/bin/flake8",
.github/workflows/builder.yml (vendored): 39 changes
@@ -33,10 +33,9 @@ on:
 - setup.py

 env:
-DEFAULT_PYTHON: 3.9
+DEFAULT_PYTHON: "3.10"
 BUILD_NAME: supervisor
 BUILD_TYPE: supervisor
-WHEELS_TAG: 3.9-alpine3.14

 jobs:
 init:
@@ -89,17 +88,25 @@ jobs:
 with:
 fetch-depth: 0

+- name: Write env-file
+if: needs.init.outputs.requirements == 'true'
+run: |
+(
+# Fix out of memory issues with rust
+echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
+) > .env_file
+
 - name: Build wheels
 if: needs.init.outputs.requirements == 'true'
-uses: home-assistant/wheels@master
+uses: home-assistant/wheels@2022.06.7
 with:
-tag: ${{ env.WHEELS_TAG }}
+abi: cp310
+tag: musllinux_1_2
 arch: ${{ matrix.arch }}
-wheels-host: wheels.hass.io
 wheels-key: ${{ secrets.WHEELS_KEY }}
-wheels-user: wheels
-apk: "build-base;libffi-dev;openssl-dev;cargo"
+apk: "libffi-dev;openssl-dev"
 skip-binary: aiohttp
+env-file: true
 requirements: "requirements.txt"

 - name: Set version
@@ -128,7 +135,7 @@ jobs:
 run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

 - name: Build supervisor
-uses: home-assistant/builder@2022.06.2
+uses: home-assistant/builder@2022.09.0
 with:
 args: |
 $BUILD_ARGS \
@@ -151,7 +158,7 @@ jobs:

 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 if: needs.init.outputs.publish == 'true'
-uses: actions/setup-python@v4.0.0
+uses: actions/setup-python@v4.2.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}

@@ -213,7 +220,7 @@ jobs:

 - name: Build the Supervisor
 if: needs.init.outputs.publish != 'true'
-uses: home-assistant/builder@2022.06.2
+uses: home-assistant/builder@2022.09.0
 with:
 args: |
 --test \
@@ -290,6 +297,12 @@ jobs:
 exit 1
 fi

+# Make sure its state is started
+test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
+if [ "$test" != "started" ]; then
+exit 1
+fi
+
 - name: Check the Supervisor code sign
 if: needs.init.outputs.publish == 'true'
 run: |
@@ -362,6 +375,12 @@ jobs:
 exit 1
 fi

+# Make sure its state is started
+test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
+if [ "$test" != "started" ]; then
+exit 1
+fi
+
 - name: Restore SSL directory from backup
 run: |
 test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
.github/workflows/ci.yaml (vendored): 126 changes
@@ -8,7 +8,7 @@ on:
 pull_request: ~

 env:
-DEFAULT_PYTHON: 3.9
+DEFAULT_PYTHON: "3.10"
 PRE_COMMIT_HOME: ~/.cache/pre-commit
 DEFAULT_CAS: v1.0.2

@@ -17,21 +17,20 @@ jobs:
 # This prevent upcoming jobs to do the same individually
 prepare:
 runs-on: ubuntu-latest
-strategy:
-matrix:
-python-version: [3.9]
-name: Prepare Python ${{ matrix.python-version }} dependencies
+outputs:
+python-version: ${{ steps.python.outputs.python-version }}
+name: Prepare Python dependencies
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ matrix.python-version }}
+- name: Set up Python
 id: python
-uses: actions/setup-python@v4.0.0
+uses: actions/setup-python@v4.2.0
 with:
-python-version: ${{ matrix.python-version }}
+python-version: ${{ env.DEFAULT_PYTHON }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
@@ -45,7 +44,7 @@ jobs:
 pip install -r requirements.txt -r requirements_tests.txt
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: ${{ env.PRE_COMMIT_HOME }}
 key: |
@@ -65,18 +64,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -109,18 +108,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -128,7 +127,7 @@ jobs:
 exit 1
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: ${{ env.PRE_COMMIT_HOME }}
 key: |
@@ -153,18 +152,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -185,18 +184,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -204,7 +203,7 @@ jobs:
 exit 1
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: ${{ env.PRE_COMMIT_HOME }}
 key: |
@@ -226,18 +225,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -245,7 +244,7 @@ jobs:
 exit 1
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: ${{ env.PRE_COMMIT_HOME }}
 key: |
@@ -270,18 +269,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -302,18 +301,18 @@ jobs:
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -321,7 +320,7 @@ jobs:
 exit 1
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: ${{ env.PRE_COMMIT_HOME }}
 key: |
@@ -339,29 +338,26 @@ jobs:
 pytest:
 runs-on: ubuntu-latest
 needs: prepare
-strategy:
-matrix:
-python-version: [3.9]
-name: Run tests Python ${{ matrix.python-version }}
+name: Run tests Python ${{ needs.prepare.outputs.python-version }}
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ matrix.python-version }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ matrix.python-version }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Install CAS tools
 uses: home-assistant/actions/helpers/cas@master
 with:
 version: ${{ env.DEFAULT_CAS }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -400,22 +396,22 @@ jobs:
 coverage:
 name: Process test coverage
 runs-on: ubuntu-latest
-needs: pytest
+needs: ["pytest", "prepare"]
 steps:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.0.0
+- name: Set up Python ${{ needs.prepare.outputs.python-version }}
+uses: actions/setup-python@v4.2.0
 id: python
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ needs.prepare.outputs.python-version }}
 - name: Restore Python virtual environment
 id: cache-venv
-uses: actions/cache@v3.0.4
+uses: actions/cache@v3.0.10
 with:
 path: venv
 key: |
-${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
 - name: Fail job if Python cache restore failed
 if: steps.cache-venv.outputs.cache-hit != 'true'
 run: |
@@ -430,4 +426,4 @@ jobs:
 coverage report
 coverage xml
 - name: Upload coverage to Codecov
-uses: codecov/codecov-action@v3.1.0
+uses: codecov/codecov-action@v3.1.1
.github/workflows/release-drafter.yml (vendored): 2 changes
@@ -36,7 +36,7 @@ jobs:
 echo "::set-output name=version::$datepre.$newpost"

 - name: Run Release Drafter
-uses: release-drafter/release-drafter@v5.20.0
+uses: release-drafter/release-drafter@v5.21.0
 with:
 tag: ${{ steps.version.outputs.version }}
 name: ${{ steps.version.outputs.version }}
.github/workflows/sentry.yaml (vendored): 2 changes
@@ -12,7 +12,7 @@ jobs:
 - name: Check out code from GitHub
 uses: actions/checkout@v3.0.2
 - name: Sentry Release
-uses: getsentry/action-release@v1.1.6
+uses: getsentry/action-release@v1.2.0
 env:
 SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
 SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
.github/workflows/stale.yml (vendored): 4 changes
@@ -9,10 +9,10 @@ jobs:
 stale:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/stale@v5.0.0
+- uses: actions/stale@v6.0.0
 with:
 repo-token: ${{ secrets.GITHUB_TOKEN }}
-days-before-stale: 60
+days-before-stale: 30
 days-before-close: 7
 stale-issue-label: "stale"
 exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
@@ -1,13 +1,13 @@
 repos:
 - repo: https://github.com/psf/black
-rev: 22.3.0
+rev: 22.6.0
 hooks:
 - id: black
 args:
 - --safe
 - --quiet
 - --target-version
-- py39
+- py310
 files: ^((supervisor|tests)/.+)?[^/]+\.py$
 - repo: https://gitlab.com/pycqa/flake8
 rev: 3.8.3
@@ -31,4 +31,4 @@ repos:
 rev: v2.32.1
 hooks:
 - id: pyupgrade
-args: [--py39-plus]
+args: [--py310-plus]
@@ -6,7 +6,6 @@ ENV \
 SUPERVISOR_API=http://localhost

 ARG \
-BUILD_ARCH \
 CAS_VERSION

 # Install base
@@ -40,7 +39,7 @@ COPY requirements.txt .
 RUN \
 export MAKEFLAGS="-j$(nproc)" \
 && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
-"https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
+"https://wheels.home-assistant.io/musllinux/" \
 -r ./requirements.txt \
 && rm -f requirements.txt

build.yaml: 10 changes
@@ -1,11 +1,11 @@
 image: homeassistant/{arch}-hassio-supervisor
 shadow_repository: ghcr.io/home-assistant
 build_from:
-aarch64: ghcr.io/home-assistant/aarch64-base-python:3.9-alpine3.14
-armhf: ghcr.io/home-assistant/armhf-base-python:3.9-alpine3.14
-armv7: ghcr.io/home-assistant/armv7-base-python:3.9-alpine3.14
-amd64: ghcr.io/home-assistant/amd64-base-python:3.9-alpine3.14
-i386: ghcr.io/home-assistant/i386-base-python:3.9-alpine3.14
+aarch64: ghcr.io/home-assistant/aarch64-base-python:3.10-alpine3.16
+armhf: ghcr.io/home-assistant/armhf-base-python:3.10-alpine3.16
+armv7: ghcr.io/home-assistant/armv7-base-python:3.10-alpine3.16
+amd64: ghcr.io/home-assistant/amd64-base-python:3.10-alpine3.16
+i386: ghcr.io/home-assistant/i386-base-python:3.10-alpine3.16
 codenotary:
 signer: notary@home-assistant.io
 base_image: notary@home-assistant.io
Submodule home-assistant-polymer updated: e7848262ea...255cb23c7d
pytest.ini (new file): 2 changes
@@ -0,0 +1,2 @@
+[pytest]
+asyncio_mode = auto
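The new pytest.ini enables pytest-asyncio's auto mode, so plain async test functions are collected and run without an explicit marker. A minimal sketch of what this allows, assuming pytest and pytest-asyncio 0.18+ are installed; the module and test names are illustrative, not taken from the repository:

# test_example.py (hypothetical)
import asyncio

async def test_sleep_returns_none():
    # With asyncio_mode = auto, pytest-asyncio supplies the event loop;
    # no @pytest.mark.asyncio decorator is required on this coroutine.
    assert await asyncio.sleep(0) is None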
@@ -1,25 +1,25 @@
 aiodns==3.0.0
-aiohttp==3.8.1
+aiohttp==3.8.3
 async_timeout==4.0.2
-atomicwrites==1.4.0
-attrs==21.4.0
-awesomeversion==22.5.2
+atomicwrites-homeassistant==1.4.1
+attrs==22.1.0
+awesomeversion==22.9.0
 brotli==1.0.9
 cchardet==2.1.7
 ciso8601==2.2.0
-colorlog==6.6.0
+colorlog==6.7.0
 cpe==1.2.1
-cryptography==36.0.2
-debugpy==1.6.0
+cryptography==38.0.1
+debugpy==1.6.3
 deepmerge==1.0.1
 dirhash==0.2.1
-docker==5.0.3
+docker==6.0.0
 gitpython==3.1.27
 jinja2==3.1.2
 pulsectl==22.3.2
-pyudev==0.23.2
-ruamel.yaml==0.17.17
+pyudev==0.24.0
+ruamel.yaml==0.17.21
 securetar==2022.2.0
-sentry-sdk==1.5.12
+sentry-sdk==1.9.10
 voluptuous==0.13.1
-dbus-next==0.2.3
+dbus-fast==1.24.0
@@ -1,15 +1,15 @@
-black==22.3.0
+black==22.8.0
 codecov==2.1.12
-coverage==6.4.1
+coverage==6.5.0
 flake8-docstrings==1.6.0
-flake8==4.0.1
-pre-commit==2.19.0
+flake8==5.0.4
+pre-commit==2.20.0
 pydocstyle==6.1.1
-pylint==2.14.3
-pytest-aiohttp==0.3.0
-pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 breaks pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
-pytest-cov==3.0.0
+pylint==2.15.3
+pytest-aiohttp==1.0.4
+pytest-asyncio==0.18.3
+pytest-cov==4.0.0
 pytest-timeout==2.1.0
-pytest==7.1.2
-pyupgrade==2.34.0
-time-machine==2.7.1
+pytest==7.1.3
+pyupgrade==3.0.0
+time-machine==2.8.2
@@ -28,7 +28,8 @@ if __name__ == "__main__":
 bootstrap.initialize_logging()

 # Init async event loop
-loop = asyncio.get_event_loop()
+loop = asyncio.new_event_loop()
+asyncio.set_event_loop(loop)

 # Check if all information are available to setup Supervisor
 bootstrap.check_environment()
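For context on the bootstrap change above: on newer Python versions asyncio.get_event_loop() warns when called with no running loop, so the entry point now creates and registers a loop explicitly. A minimal sketch of that pattern under the same assumption; main() here is a placeholder, not the Supervisor's real startup coroutine:

import asyncio

async def main() -> None:
    await asyncio.sleep(0)  # stand-in for the real startup coroutine

loop = asyncio.new_event_loop()   # create the loop explicitly
asyncio.set_event_loop(loop)      # register it as the current loop
try:
    loop.run_until_complete(main())
finally:
    loop.close()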
@@ -3,7 +3,7 @@ import asyncio
 from contextlib import suppress
 import logging
 import tarfile
-from typing import Optional, Union
+from typing import Union

 from ..const import AddonBoot, AddonStartup, AddonState
 from ..coresys import CoreSys, CoreSysAttributes
@@ -24,6 +24,7 @@ from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..store.addon import AddonStore
 from ..utils import check_exception_chain
 from .addon import Addon
+from .const import ADDON_UPDATE_CONDITIONS
 from .data import AddonsData

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -52,7 +53,7 @@ class AddonManager(CoreSysAttributes):
 """Return a list of all installed add-ons."""
 return list(self.local.values())

-def get(self, addon_slug: str, local_only: bool = False) -> Optional[AnyAddon]:
+def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
 """Return an add-on from slug.

 Prio:
@@ -65,7 +66,7 @@ class AddonManager(CoreSysAttributes):
 return self.store.get(addon_slug)
 return None

-def from_token(self, token: str) -> Optional[Addon]:
+def from_token(self, token: str) -> Addon | None:
 """Return an add-on from Supervisor token."""
 for addon in self.installed:
 if token == addon.supervisor_token:
@@ -144,11 +145,7 @@ class AddonManager(CoreSysAttributes):
 self.sys_capture_exception(err)

 @Job(
-conditions=[
-JobCondition.FREE_SPACE,
-JobCondition.INTERNET_HOST,
-JobCondition.HEALTHY,
-],
+conditions=ADDON_UPDATE_CONDITIONS,
 on_condition=AddonsJobError,
 )
 async def install(self, slug: str) -> None:
@@ -167,6 +164,7 @@ class AddonManager(CoreSysAttributes):

 self.data.install(store)
 addon = Addon(self.coresys, slug)
+await addon.load()

 if not addon.path_data.is_dir():
 _LOGGER.info(
@@ -205,7 +203,7 @@ class AddonManager(CoreSysAttributes):
 else:
 addon.state = AddonState.UNKNOWN

-await addon.remove_data()
+await addon.unload()

 # Cleanup audio settings
 if addon.path_pulse.exists():
@@ -245,14 +243,10 @@ class AddonManager(CoreSysAttributes):
 _LOGGER.info("Add-on '%s' successfully removed", slug)

 @Job(
-conditions=[
-JobCondition.FREE_SPACE,
-JobCondition.INTERNET_HOST,
-JobCondition.HEALTHY,
-],
+conditions=ADDON_UPDATE_CONDITIONS,
 on_condition=AddonsJobError,
 )
-async def update(self, slug: str, backup: Optional[bool] = False) -> None:
+async def update(self, slug: str, backup: bool | None = False) -> None:
 """Update add-on."""
 if slug not in self.local:
 raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
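Many of the typing changes in this and the following files swap typing.Optional[X] for the PEP 604 spelling X | None, which Python 3.10 accepts directly in annotations. A small illustrative before/after, not code from the repository:

from typing import Optional

def lookup_old(slug: str, default: Optional[int] = None) -> Optional[int]:
    # pre-3.10 spelling
    return default

def lookup_new(slug: str, default: int | None = None) -> int | None:
    # Python 3.10+ spelling; equivalent at runtime and for type checkers
    return default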
@@ -10,7 +10,7 @@ import secrets
 import shutil
 import tarfile
 from tempfile import TemporaryDirectory
-from typing import Any, Awaitable, Final, Optional
+from typing import Any, Awaitable, Final

 import aiohttp
 from deepmerge import Merger
@@ -18,6 +18,7 @@ from securetar import atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

+from ..bus import EventListener
 from ..const import (
 ATTR_ACCESS_TOKEN,
 ATTR_AUDIO_INPUT,
@@ -48,25 +49,36 @@ from ..const import (
 AddonBoot,
 AddonStartup,
 AddonState,
+BusEvent,
 )
 from ..coresys import CoreSys
 from ..docker.addon import DockerAddon
+from ..docker.const import ContainerState
+from ..docker.monitor import DockerContainerStateEvent
 from ..docker.stats import DockerStats
 from ..exceptions import (
 AddonConfigurationError,
 AddonsError,
 AddonsJobError,
 AddonsNotSupportedError,
 ConfigurationFileError,
 DockerError,
 DockerRequestError,
 HostAppArmorError,
 )
 from ..hardware.data import Device
 from ..homeassistant.const import WSEvent, WSType
+from ..jobs.const import JobExecutionLimit
+from ..jobs.decorator import Job
 from ..utils import check_port
 from ..utils.apparmor import adjust_profile
 from ..utils.json import read_json_file, write_json_file
-from .const import AddonBackupMode
+from .const import (
+WATCHDOG_MAX_ATTEMPTS,
+WATCHDOG_RETRY_SECONDS,
+WATCHDOG_THROTTLE_MAX_CALLS,
+WATCHDOG_THROTTLE_PERIOD,
+AddonBackupMode,
+)
 from .model import AddonModel, Data
 from .options import AddonOptions
 from .utils import remove_data
@@ -101,6 +113,58 @@ class Addon(AddonModel):
 super().__init__(coresys, slug)
 self.instance: DockerAddon = DockerAddon(coresys, self)
 self._state: AddonState = AddonState.UNKNOWN
+self._manual_stop: bool = (
+self.sys_hardware.helper.last_boot != self.sys_config.last_boot
+)
+self._listeners: list[EventListener] = []
+
+@Job(
+name=f"addon_{slug}_restart_after_problem",
+limit=JobExecutionLimit.THROTTLE_RATE_LIMIT,
+throttle_period=WATCHDOG_THROTTLE_PERIOD,
+throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS,
+on_condition=AddonsJobError,
+)
+async def restart_after_problem(addon: Addon, state: ContainerState):
+"""Restart unhealthy or failed addon."""
+attempts = 0
+while await addon.instance.current_state() == state:
+if not addon.in_progress:
+_LOGGER.warning(
+"Watchdog found addon %s is %s, restarting...",
+addon.name,
+state.value,
+)
+try:
+if state == ContainerState.FAILED:
+# Ensure failed container is removed before attempting reanimation
+if attempts == 0:
+with suppress(DockerError):
+await addon.instance.stop(remove_container=True)
+
+await addon.start()
+else:
+await addon.restart()
+except AddonsError as err:
+attempts = attempts + 1
+_LOGGER.error(
+"Watchdog restart of addon %s failed!", addon.name
+)
+addon.sys_capture_exception(err)
+else:
+break
+
+if attempts >= WATCHDOG_MAX_ATTEMPTS:
+_LOGGER.critical(
+"Watchdog cannot restart addon %s, failed all %s attempts",
+addon.name,
+attempts,
+)
+break
+
+await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
+
+self._restart_after_problem = restart_after_problem

 def __repr__(self) -> str:
 """Return internal representation."""
@@ -135,15 +199,20 @@ class Addon(AddonModel):

 async def load(self) -> None:
 """Async initialize of object."""
+self._listeners.append(
+self.sys_bus.register_event(
+BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
+)
+)
+self._listeners.append(
+self.sys_bus.register_event(
+BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.watchdog_container
+)
+)
+
 with suppress(DockerError):
 await self.instance.attach(version=self.version)
-
-# Evaluate state
-if await self.instance.is_running():
-self.state = AddonState.STARTED
-else:
-self.state = AddonState.STOPPED

 @property
 def ip_address(self) -> IPv4Address:
 """Return IP of add-on instance."""
@@ -180,7 +249,7 @@ class Addon(AddonModel):
 return self._available(self.data_store)

 @property
-def version(self) -> Optional[str]:
+def version(self) -> str | None:
 """Return installed version."""
 return self.persist[ATTR_VERSION]

@@ -204,7 +273,7 @@ class Addon(AddonModel):
 )

 @options.setter
-def options(self, value: Optional[dict[str, Any]]) -> None:
+def options(self, value: dict[str, Any] | None) -> None:
 """Store user add-on options."""
 self.persist[ATTR_OPTIONS] = {} if value is None else deepcopy(value)

@@ -249,17 +318,17 @@ class Addon(AddonModel):
 return self.persist[ATTR_UUID]

 @property
-def supervisor_token(self) -> Optional[str]:
+def supervisor_token(self) -> str | None:
 """Return access token for Supervisor API."""
 return self.persist.get(ATTR_ACCESS_TOKEN)

 @property
-def ingress_token(self) -> Optional[str]:
+def ingress_token(self) -> str | None:
 """Return access token for Supervisor API."""
 return self.persist.get(ATTR_INGRESS_TOKEN)

 @property
-def ingress_entry(self) -> Optional[str]:
+def ingress_entry(self) -> str | None:
 """Return ingress external URL."""
 if self.with_ingress:
 return f"/api/hassio_ingress/{self.ingress_token}"
@@ -281,12 +350,12 @@ class Addon(AddonModel):
 self.persist[ATTR_PROTECTED] = value

 @property
-def ports(self) -> Optional[dict[str, Optional[int]]]:
+def ports(self) -> dict[str, int | None] | None:
 """Return ports of add-on."""
 return self.persist.get(ATTR_NETWORK, super().ports)

 @ports.setter
-def ports(self, value: Optional[dict[str, Optional[int]]]) -> None:
+def ports(self, value: dict[str, int | None] | None) -> None:
 """Set custom ports of add-on."""
 if value is None:
 self.persist.pop(ATTR_NETWORK, None)
@@ -301,7 +370,7 @@ class Addon(AddonModel):
 self.persist[ATTR_NETWORK] = new_ports

 @property
-def ingress_url(self) -> Optional[str]:
+def ingress_url(self) -> str | None:
 """Return URL to ingress url."""
 if not self.with_ingress:
 return None
@@ -312,7 +381,7 @@ class Addon(AddonModel):
 return url

 @property
-def webui(self) -> Optional[str]:
+def webui(self) -> str | None:
 """Return URL to webui or None."""
 url = super().webui
 if not url:
@@ -340,7 +409,7 @@ class Addon(AddonModel):
 return f"{proto}://[HOST]:{port}{s_suffix}"

 @property
-def ingress_port(self) -> Optional[int]:
+def ingress_port(self) -> int | None:
 """Return Ingress port."""
 if not self.with_ingress:
 return None
@@ -351,8 +420,11 @@ class Addon(AddonModel):
 return port

 @property
-def ingress_panel(self) -> Optional[bool]:
+def ingress_panel(self) -> bool | None:
 """Return True if the add-on access support ingress."""
+if not self.with_ingress:
+return None
+
 return self.persist[ATTR_INGRESS_PANEL]

 @ingress_panel.setter
@@ -361,19 +433,19 @@ class Addon(AddonModel):
 self.persist[ATTR_INGRESS_PANEL] = value

 @property
-def audio_output(self) -> Optional[str]:
+def audio_output(self) -> str | None:
 """Return a pulse profile for output or None."""
 if not self.with_audio:
 return None
 return self.persist.get(ATTR_AUDIO_OUTPUT)

 @audio_output.setter
-def audio_output(self, value: Optional[str]):
+def audio_output(self, value: str | None):
 """Set audio output profile settings."""
 self.persist[ATTR_AUDIO_OUTPUT] = value

 @property
-def audio_input(self) -> Optional[str]:
+def audio_input(self) -> str | None:
 """Return pulse profile for input or None."""
 if not self.with_audio:
 return None
@@ -381,12 +453,12 @@ class Addon(AddonModel):
 return self.persist.get(ATTR_AUDIO_INPUT)

 @audio_input.setter
-def audio_input(self, value: Optional[str]) -> None:
+def audio_input(self, value: str | None) -> None:
 """Set audio input settings."""
 self.persist[ATTR_AUDIO_INPUT] = value

 @property
-def image(self) -> Optional[str]:
+def image(self) -> str | None:
 """Return image name of add-on."""
 return self.persist.get(ATTR_IMAGE)

@@ -438,6 +510,11 @@ class Addon(AddonModel):

 return options_schema.pwned

+@property
+def loaded(self) -> bool:
+"""Is add-on loaded."""
+return bool(self._listeners)
+
 def save_persist(self) -> None:
 """Save data of add-on."""
 self.sys_addons.data.save_data()
@@ -506,8 +583,11 @@ class Addon(AddonModel):

 raise AddonConfigurationError()

-async def remove_data(self) -> None:
-"""Remove add-on data."""
+async def unload(self) -> None:
+"""Unload add-on and remove data."""
+for listener in self._listeners:
+self.sys_bus.remove_listener(listener)
+
 if not self.path_data.is_dir():
 return

@@ -613,27 +693,18 @@ class Addon(AddonModel):
 # Start Add-on
 try:
 await self.instance.run()
-except DockerRequestError as err:
-self.state = AddonState.ERROR
-raise AddonsError() from err
 except DockerError as err:
 self.state = AddonState.ERROR
 raise AddonsError() from err
-else:
-self.state = AddonState.STARTED

 async def stop(self) -> None:
 """Stop add-on."""
+self._manual_stop = True
 try:
 await self.instance.stop()
-except DockerRequestError as err:
-self.state = AddonState.ERROR
-raise AddonsError() from err
 except DockerError as err:
 self.state = AddonState.ERROR
 raise AddonsError() from err
-else:
-self.state = AddonState.STOPPED

 async def restart(self) -> None:
 """Restart add-on."""
@@ -681,16 +752,18 @@ class Addon(AddonModel):
 try:
 command_return = await self.instance.run_inside(command)
 if command_return.exit_code != 0:
-_LOGGER.error(
-"Pre-/Post backup command returned error code: %s",
-command_return.exit_code,
+_LOGGER.debug(
+"Pre-/Post backup command failed with: %s", command_return.output
 )
+raise AddonsError(
+f"Pre-/Post backup command returned error code: {command_return.exit_code}",
+_LOGGER.error,
+)
-raise AddonsError()
 except DockerError as err:
-_LOGGER.error(
-"Failed running pre-/post backup command %s: %s", command, err
-)
-raise AddonsError() from err
+raise AddonsError(
+f"Failed running pre-/post backup command {command}: {str(err)}",
+_LOGGER.error,
+) from err

 async def backup(self, tar_file: tarfile.TarFile) -> None:
 """Backup state of an add-on."""
@@ -872,6 +945,10 @@ class Addon(AddonModel):
 )
 raise AddonsError() from err

+# Is add-on loaded
+if not self.loaded:
+await self.load()
+
 # Run add-on
 if data[ATTR_STATE] == AddonState.STARTED:
 return await self.start()
@@ -884,3 +961,36 @@ class Addon(AddonModel):
 Return Coroutine.
 """
 return self.instance.check_trust()
+
+async def container_state_changed(self, event: DockerContainerStateEvent) -> None:
+"""Set addon state from container state."""
+if event.name != self.instance.name:
+return
+
+if event.state in [
+ContainerState.RUNNING,
+ContainerState.HEALTHY,
+ContainerState.UNHEALTHY,
+]:
+self._manual_stop = False
+self.state = AddonState.STARTED
+elif event.state == ContainerState.STOPPED:
+self.state = AddonState.STOPPED
+elif event.state == ContainerState.FAILED:
+self.state = AddonState.ERROR
+
+async def watchdog_container(self, event: DockerContainerStateEvent) -> None:
+"""Process state changes in addon container and restart if necessary."""
+if event.name != self.instance.name:
+return
+
+# Skip watchdog if not enabled or manual stopped
+if not self.watchdog or self._manual_stop:
+return
+
+if event.state in [
+ContainerState.FAILED,
+ContainerState.STOPPED,
+ContainerState.UNHEALTHY,
+]:
+await self._restart_after_problem(self, event.state)
@@ -15,6 +15,7 @@ from ..const import (
 META_ADDON,
 )
 from ..coresys import CoreSys, CoreSysAttributes
+from ..docker.interface import MAP_ARCH
 from ..exceptions import ConfigurationFileError
 from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG
@@ -50,6 +51,9 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
 if not self._data[ATTR_BUILD_FROM]:
 return f"ghcr.io/home-assistant/{self.sys_arch.default}-base:latest"

+if isinstance(self._data[ATTR_BUILD_FROM], str):
+return self._data[ATTR_BUILD_FROM]
+
 # Evaluate correct base image
 arch = self.sys_arch.match(list(self._data[ATTR_BUILD_FROM].keys()))
 return self._data[ATTR_BUILD_FROM][arch]
@@ -87,6 +91,7 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
 "pull": True,
 "forcerm": not self.sys_dev,
 "squash": self.squash,
+"platform": MAP_ARCH[self.sys_arch.match(self.addon.arch)],
 "labels": {
 "io.hass.version": version,
 "io.hass.arch": self.sys_arch.default,
@@ -1,6 +1,9 @@
 """Add-on static data."""
+from datetime import timedelta
 from enum import Enum

+from ..jobs.const import JobCondition
+

 class AddonBackupMode(str, Enum):
 """Backup mode of an Add-on."""
@@ -11,3 +14,15 @@ class AddonBackupMode(str, Enum):

 ATTR_BACKUP = "backup"
 ATTR_CODENOTARY = "codenotary"
+WATCHDOG_RETRY_SECONDS = 10
+WATCHDOG_MAX_ATTEMPTS = 5
+WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
+WATCHDOG_THROTTLE_MAX_CALLS = 10
+
+ADDON_UPDATE_CONDITIONS = [
+JobCondition.FREE_SPACE,
+JobCondition.HEALTHY,
+JobCondition.INTERNET_HOST,
+JobCondition.PLUGINS_UPDATED,
+JobCondition.SUPERVISOR_UPDATED,
+]
@@ -1,12 +1,10 @@
 """Init file for Supervisor add-ons."""
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Any, Awaitable, Optional
+from typing import Any, Awaitable

 from awesomeversion import AwesomeVersion, AwesomeVersionException

-from supervisor.addons.const import AddonBackupMode
-
 from ..const import (
 ATTR_ADVANCED,
 ATTR_APPARMOR,
@@ -79,7 +77,7 @@ from ..const import (
 )
 from ..coresys import CoreSys, CoreSysAttributes
 from ..docker.const import Capabilities
-from .const import ATTR_BACKUP, ATTR_CODENOTARY
+from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
 from .options import AddonOptions, UiOptions
 from .validate import RE_SERVICE, RE_VOLUME

@@ -125,7 +123,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data[ATTR_BOOT]

 @property
-def auto_update(self) -> Optional[bool]:
+def auto_update(self) -> bool | None:
 """Return if auto update is enable."""
 return None

@@ -150,22 +148,22 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data[ATTR_TIMEOUT]

 @property
-def uuid(self) -> Optional[str]:
+def uuid(self) -> str | None:
 """Return an API token for this add-on."""
 return None

 @property
-def supervisor_token(self) -> Optional[str]:
+def supervisor_token(self) -> str | None:
 """Return access token for Supervisor API."""
 return None

 @property
-def ingress_token(self) -> Optional[str]:
+def ingress_token(self) -> str | None:
 """Return access token for Supervisor API."""
 return None

 @property
-def ingress_entry(self) -> Optional[str]:
+def ingress_entry(self) -> str | None:
 """Return ingress external URL."""
 return None

@@ -175,7 +173,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data[ATTR_DESCRIPTON]

 @property
-def long_description(self) -> Optional[str]:
+def long_description(self) -> str | None:
 """Return README.md as long_description."""
 readme = Path(self.path_location, "README.md")

@@ -245,32 +243,32 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data.get(ATTR_DISCOVERY, [])

 @property
-def ports_description(self) -> Optional[dict[str, str]]:
+def ports_description(self) -> dict[str, str] | None:
 """Return descriptions of ports."""
 return self.data.get(ATTR_PORTS_DESCRIPTION)

 @property
-def ports(self) -> Optional[dict[str, Optional[int]]]:
+def ports(self) -> dict[str, int | None] | None:
 """Return ports of add-on."""
 return self.data.get(ATTR_PORTS)

 @property
-def ingress_url(self) -> Optional[str]:
+def ingress_url(self) -> str | None:
 """Return URL to ingress url."""
 return None

 @property
-def webui(self) -> Optional[str]:
+def webui(self) -> str | None:
 """Return URL to webui or None."""
 return self.data.get(ATTR_WEBUI)

 @property
-def watchdog(self) -> Optional[str]:
+def watchdog(self) -> str | None:
 """Return URL to for watchdog or None."""
 return self.data.get(ATTR_WATCHDOG)

 @property
-def ingress_port(self) -> Optional[int]:
+def ingress_port(self) -> int | None:
 """Return Ingress port."""
 return None

@@ -315,7 +313,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]

 @property
-def environment(self) -> Optional[dict[str, str]]:
+def environment(self) -> dict[str, str] | None:
 """Return environment of add-on."""
 return self.data.get(ATTR_ENVIRONMENT)

@@ -364,12 +362,12 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data.get(ATTR_BACKUP_EXCLUDE, [])

 @property
-def backup_pre(self) -> Optional[str]:
+def backup_pre(self) -> str | None:
 """Return pre-backup command."""
 return self.data.get(ATTR_BACKUP_PRE)

 @property
-def backup_post(self) -> Optional[str]:
+def backup_post(self) -> str | None:
 """Return post-backup command."""
 return self.data.get(ATTR_BACKUP_POST)

@@ -394,7 +392,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data[ATTR_INGRESS]

 @property
-def ingress_panel(self) -> Optional[bool]:
+def ingress_panel(self) -> bool | None:
 """Return True if the add-on access support ingress."""
 return None

@@ -444,7 +442,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data[ATTR_DEVICETREE]

 @property
-def with_tmpfs(self) -> Optional[str]:
+def with_tmpfs(self) -> str | None:
 """Return if tmp is in memory of add-on."""
 return self.data[ATTR_TMPFS]

@@ -464,12 +462,12 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.data[ATTR_VIDEO]

 @property
-def homeassistant_version(self) -> Optional[str]:
+def homeassistant_version(self) -> str | None:
 """Return min Home Assistant version they needed by Add-on."""
 return self.data.get(ATTR_HOMEASSISTANT)

 @property
-def url(self) -> Optional[str]:
+def url(self) -> str | None:
 """Return URL of add-on."""
 return self.data.get(ATTR_URL)

@@ -512,7 +510,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return self.sys_arch.default

 @property
-def image(self) -> Optional[str]:
+def image(self) -> str | None:
 """Generate image name from data."""
 return self._image(self.data)

@@ -573,7 +571,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return AddonOptions(self.coresys, raw_schema, self.name, self.slug)

 @property
-def schema_ui(self) -> Optional[list[dict[any, any]]]:
+def schema_ui(self) -> list[dict[any, any]] | None:
 """Create a UI schema for add-on options."""
 raw_schema = self.data[ATTR_SCHEMA]

@@ -592,7 +590,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return ATTR_CODENOTARY in self.data

 @property
-def codenotary(self) -> Optional[str]:
+def codenotary(self) -> str | None:
 """Return Signer email address for CAS."""
 return self.data.get(ATTR_CODENOTARY)

@@ -616,7 +614,7 @@ class AddonModel(CoreSysAttributes, ABC):
 return False

 # Home Assistant
-version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
+version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
 try:
 return self.sys_homeassistant.version >= version
 except (AwesomeVersionException, TypeError):
@@ -640,7 +638,7 @@ class AddonModel(CoreSysAttributes, ABC):
 """Uninstall this add-on."""
 return self.sys_addons.uninstall(self.slug)

-def update(self, backup: Optional[bool] = False) -> Awaitable[None]:
+def update(self, backup: bool | None = False) -> Awaitable[None]:
 """Update this add-on."""
 return self.sys_addons.update(self.slug, backup=backup)

@@ -3,7 +3,7 @@ import hashlib
 import logging
 from pathlib import Path
 import re
-from typing import Any, Union
+from typing import Any

 import voluptuous as vol

@@ -293,7 +293,7 @@ class UiOptions(CoreSysAttributes):
 multiple: bool = False,
 ) -> None:
 """Validate a single element."""
-ui_node: dict[str, Union[str, bool, float, list[str]]] = {"name": key}
+ui_node: dict[str, str | bool | float | list[str]] = {"name": key}

 # If multiple
 if multiple:
@@ -7,8 +7,6 @@ import uuid

 import voluptuous as vol

-from supervisor.addons.const import AddonBackupMode
-
 from ..const import (
 ARCH_ALL,
 ATTR_ACCESS_TOKEN,
@@ -110,7 +108,7 @@ from ..validate import (
 uuid_match,
 version_tag,
 )
-from .const import ATTR_BACKUP, ATTR_CODENOTARY
+from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
 from .options import RE_SCHEMA_ELEMENT

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -353,8 +351,9 @@ SCHEMA_ADDON_CONFIG = vol.All(
 # pylint: disable=no-value-for-parameter
 SCHEMA_BUILD_CONFIG = vol.Schema(
 {
-vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
-{vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
+vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Any(
+vol.Match(RE_DOCKER_IMAGE_BUILD),
+vol.Schema({vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}),
 ),
 vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
 vol.Optional(ATTR_ARGS, default=dict): vol.Schema({str: str}),
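The SCHEMA_BUILD_CONFIG change above lets build_from be either a single image string or a per-architecture mapping by wrapping both shapes in vol.Any. A reduced sketch of the same idea; the architecture list and the simplified image regex below are stand-ins for ARCH_ALL and RE_DOCKER_IMAGE_BUILD, not the repository's definitions:

import voluptuous as vol

ARCHES = ["aarch64", "amd64", "armhf", "armv7", "i386"]
IMAGE_RE = r"^[\w./-]+:[\w.-]+$"  # simplified stand-in pattern

BUILD_FROM = vol.Any(
    vol.Match(IMAGE_RE),                                # one image for every arch
    vol.Schema({vol.In(ARCHES): vol.Match(IMAGE_RE)}),  # or a per-arch mapping
)

# Both forms validate:
BUILD_FROM("ghcr.io/home-assistant/amd64-base-python:3.10-alpine3.16")
BUILD_FROM({"amd64": "ghcr.io/home-assistant/amd64-base-python:3.10-alpine3.16"})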
@@ -1,11 +1,13 @@
"""Init file for Supervisor RESTful API."""
import logging
from pathlib import Path
from typing import Optional
from typing import Any

from aiohttp import web

from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
@@ -31,11 +33,13 @@ from .security import APISecurity
from .services import APIServices
from .store import APIStore
from .supervisor import APISupervisor
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570


class RestAPI(CoreSysAttributes):
@@ -51,11 +55,15 @@ class RestAPI(CoreSysAttributes):
self.security.system_validation,
self.security.token_validation,
],
handler_args={
"max_line_size": MAX_LINE_SIZE,
"max_field_size": MAX_LINE_SIZE,
},
)

# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: Optional[web.TCPSite] = None
self._site: web.TCPSite | None = None

async def load(self) -> None:
"""Register REST API Calls."""
@@ -269,6 +277,10 @@ class RestAPI(CoreSysAttributes):
"/resolution/issue/{issue}",
api_resolution.dismiss_issue,
),
web.get(
"/resolution/issue/{issue}/suggestions",
api_resolution.suggestions_for_issue,
),
web.post("/resolution/healthcheck", api_resolution.healthcheck),
]
)
@@ -378,7 +390,6 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/addons", api_addons.list),
web.get("/addons/{addon}/info", api_addons.info),
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop),
@@ -396,6 +407,25 @@ class RestAPI(CoreSysAttributes):
]
)

# Legacy routing to support requests for not installed addons
api_store = APIStore()
api_store.coresys = self.coresys

@api_process
async def addons_addon_info(request: web.Request) -> dict[str, Any]:
"""Route to store if info requested for not installed addon."""
try:
return await api_addons.info(request)
except APIAddonNotInstalled:
# Route to store/{addon}/info but add missing fields
return dict(
await api_store.addons_addon_info_wrapped(request),
state=AddonState.UNKNOWN,
options=self.sys_addons.store[request.match_info["addon"]].options,
)

self.webapp.add_routes([web.get("/addons/{addon}/info", addons_addon_info)])

def _register_ingress(self) -> None:
"""Register Ingress functions."""
api_ingress = APIIngress()
@@ -418,11 +448,13 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/backups", api_backups.list),
web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload),
web.post("/backups/new/full", api_backups.backup_full),
web.post("/backups/new/partial", api_backups.backup_partial),
web.post("/backups/new/upload", api_backups.upload),
web.get("/backups/{slug}/info", api_backups.info),
web.get("/backups/{slug}/info", api_backups.backup_info),
web.delete("/backups/{slug}", api_backups.remove),
web.post("/backups/{slug}/restore/full", api_backups.restore_full),
web.post(
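One detail in the hunks above that is easy to miss: handler_args is a regular aiohttp web.Application argument that is forwarded to the low-level request handler, so raising max_line_size and max_field_size lifts aiohttp's default 8190-byte limit on header lines. A standalone sketch, not Supervisor code; the /ping route and port are made up for the example:

from aiohttp import web

MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570

# handler_args is forwarded to aiohttp's request handler factory and raises
# the default limits on header line and header field size.
app = web.Application(
    client_max_size=MAX_CLIENT_SIZE,
    handler_args={
        "max_line_size": MAX_LINE_SIZE,
        "max_field_size": MAX_LINE_SIZE,
    },
)


async def ping(request: web.Request) -> web.Response:
    """Tiny endpoint so the app has something to serve."""
    return web.json_response({"result": "ok"})


app.add_routes([web.get("/ping", ping)])

if __name__ == "__main__":
    web.run_app(app, port=8099)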
@@ -96,7 +96,13 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
from ..exceptions import APIError, APIForbidden, PwnedError, PwnedSecret
from ..exceptions import (
APIAddonNotInstalled,
APIError,
APIForbidden,
PwnedError,
PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate, json_loads
@@ -140,7 +146,7 @@ class APIAddons(CoreSysAttributes):
if not addon:
raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed:
raise APIError("Addon is not installed")
raise APIAddonNotInstalled("Addon is not installed")

return addon

@@ -177,7 +183,6 @@ class APIAddons(CoreSysAttributes):
"""Reload all add-on data from store."""
await asyncio.shield(self.sys_store.reload())

@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
addon: AnyAddon = self._extract_addon(request)
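The switch from a generic APIError to APIAddonNotInstalled is what makes the legacy /addons/{addon}/info fallback in the earlier hunks possible: the router can catch the specific exception and serve store data instead. A minimal sketch of that pattern with illustrative names (the real exception lives in supervisor.exceptions):

class APIError(Exception):
    """Generic API error (illustrative)."""


class APIAddonNotInstalled(APIError):
    """Requested add-on is known but not installed (illustrative)."""


def installed_info(slug: str, installed: dict[str, dict]) -> dict:
    """Return info for an installed add-on or raise the specific error."""
    if slug not in installed:
        raise APIAddonNotInstalled("Addon is not installed")
    return installed[slug]


def addon_info(slug: str, installed: dict[str, dict], store: dict[str, dict]) -> dict:
    """Prefer installed data, fall back to the store entry."""
    try:
        return installed_info(slug, installed)
    except APIAddonNotInstalled:
        return {**store[slug], "state": "unknown"}


print(addon_info("core_ssh", installed={}, store={"core_ssh": {"version": "9.6.0"}}))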
@@ -9,13 +9,14 @@ from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol

from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
ATTR_ADDONS,
ATTR_BACKUPS,
ATTR_COMPRESSED,
ATTR_CONTENT,
ATTR_DATE,
ATTR_DAYS_UNTIL_STALE,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_NAME,
@@ -24,6 +25,7 @@ from ..const import (
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TYPE,
ATTR_VERSION,
)
@@ -68,6 +70,12 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
}
)

SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)


class APIBackups(CoreSysAttributes):
"""Handle RESTful API for backups functions."""
@@ -79,27 +87,30 @@ class APIBackups(CoreSysAttributes):
raise APIError("Backup does not exist")
return backup

def _list_backups(self):
"""Return list of backups."""
return [
{
ATTR_SLUG: backup.slug,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_PROTECTED: backup.protected,
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_ADDONS: backup.addon_list,
ATTR_FOLDERS: backup.folders,
},
}
for backup in self.sys_backups.list_backups
]

@api_process
async def list(self, request):
"""Return backup list."""
data_backups = []
for backup in self.sys_backups.list_backups:
data_backups.append(
{
ATTR_SLUG: backup.slug,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_PROTECTED: backup.protected,
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_ADDONS: backup.addon_list,
ATTR_FOLDERS: backup.folders,
},
}
)
data_backups = self._list_backups()

if request.path == "/snapshots":
# Kept for backwards compability
@@ -107,6 +118,24 @@ class APIBackups(CoreSysAttributes):

return {ATTR_BACKUPS: data_backups}

@api_process
async def info(self, request):
"""Return backup list and manager info."""
return {
ATTR_BACKUPS: self._list_backups(),
ATTR_DAYS_UNTIL_STALE: self.sys_backups.days_until_stale,
}

@api_process
async def options(self, request):
"""Set backup manager options."""
body = await api_validate(SCHEMA_OPTIONS, request)

if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]

self.sys_backups.save_data()

@api_process
async def reload(self, request):
"""Reload backup list."""
@@ -114,7 +143,7 @@ class APIBackups(CoreSysAttributes):
return True

@api_process
async def info(self, request):
async def backup_info(self, request):
"""Return backup info."""
backup = self._extract_slug(request)

@@ -137,6 +166,7 @@ class APIBackups(CoreSysAttributes):
ATTR_SIZE: backup.size,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
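With the refactor above, /backups/info returns the backup list together with the manager's days_until_stale setting, /backups/options updates it, and per-backup details move to backup_info behind /backups/{slug}/info. A hypothetical client session sketch; the host, token handling, and slug are placeholders, not a documented client API:

import asyncio
import os

import aiohttp

SUPERVISOR = "http://supervisor"   # placeholder host
SLUG = "d1e0abc0"                  # placeholder backup slug


async def main() -> None:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # Manager-level view: backup list plus days_until_stale.
        async with session.get(f"{SUPERVISOR}/backups/info") as resp:
            print(await resp.json())

        # Update the retention option validated by SCHEMA_OPTIONS above.
        await session.post(f"{SUPERVISOR}/backups/options", json={"days_until_stale": 30})

        # Per-backup details, now served by backup_info().
        async with session.get(f"{SUPERVISOR}/backups/{SLUG}/info") as resp:
            print(await resp.json())


asyncio.run(main())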
@@ -120,25 +120,25 @@ class APIHost(CoreSysAttributes):
def service_start(self, request):
"""Start a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.start(unit))
return [asyncio.shield(self.sys_host.services.start(unit))]

@api_process
def service_stop(self, request):
"""Stop a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.stop(unit))
return [asyncio.shield(self.sys_host.services.stop(unit))]

@api_process
def service_reload(self, request):
"""Reload a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.reload(unit))
return [asyncio.shield(self.sys_host.services.reload(unit))]

@api_process
def service_restart(self, request):
"""Restart a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.restart(unit))
return [asyncio.shield(self.sys_host.services.restart(unit))]

@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
@@ -2,7 +2,7 @@
import asyncio
from ipaddress import ip_address
import logging
from typing import Any, Union
from typing import Any

import aiohttp
from aiohttp import ClientTimeout, hdrs, web
@@ -86,7 +86,7 @@ class APIIngress(CoreSysAttributes):
@require_home_assistant
async def handler(
self, request: web.Request
) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
) -> web.Response | web.StreamResponse | web.WebSocketResponse:
"""Route data to Supervisor ingress service."""

# Check Ingress Session
@@ -157,7 +157,7 @@ class APIIngress(CoreSysAttributes):

async def _handle_request(
self, request: web.Request, addon: Addon, path: str
) -> Union[web.Response, web.StreamResponse]:
) -> web.Response | web.StreamResponse:
"""Ingress route for request."""
url = self._create_url(addon, path)
source_header = _init_header(request, addon)
@@ -216,9 +216,7 @@ class APIIngress(CoreSysAttributes):
return response


def _init_header(
request: web.Request, addon: str
) -> Union[CIMultiDict, dict[str, str]]:
def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]:
"""Create initial header."""
headers = {}
@@ -30,6 +30,7 @@ from ..const import (
ATTR_PARENT,
ATTR_PRIMARY,
ATTR_PSK,
ATTR_READY,
ATTR_SIGNAL,
ATTR_SSID,
ATTR_SUPERVISOR_INTERNET,
@@ -89,6 +90,7 @@ def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
ATTR_READY: config.ready,
}


@@ -194,12 +196,14 @@ class APINetwork(CoreSysAttributes):
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4 = attr.evolve(
interface.ipv4 or IpConfig(InterfaceMethod.STATIC, [], None, []),
interface.ipv4
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6 = attr.evolve(
interface.ipv6 or IpConfig(InterfaceMethod.STATIC, [], None, []),
interface.ipv6
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_WIFI:
@@ -218,7 +222,9 @@ class APINetwork(CoreSysAttributes):
@api_process
def reload(self, request: web.Request) -> Awaitable[None]:
"""Reload network data."""
return asyncio.shield(self.sys_host.network.update())
return asyncio.shield(
self.sys_host.network.update(force_connectivity_check=True)
)

@api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
@@ -255,6 +261,7 @@ class APINetwork(CoreSysAttributes):
body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
)

ipv6_config = None
@@ -264,6 +271,7 @@ class APINetwork(CoreSysAttributes):
body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None),
body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
None,
)

vlan_interface = Interface(
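The extra None argument in the IpConfig(...) calls above is the consequence of the new ready field: with attrs-style classes, every positional construction site must supply a value once a field is added. An illustrative sketch; IpConfigSketch is a stand-in, not the real class:

from ipaddress import IPv4Address, IPv4Interface

import attr


@attr.s(slots=True, frozen=True)
class IpConfigSketch:
    """Illustrative stand-in for Supervisor's IpConfig."""

    method: str = attr.ib()
    address: list[IPv4Interface] = attr.ib()
    gateway: IPv4Address | None = attr.ib()
    nameservers: list[IPv4Address] = attr.ib()
    ready: bool | None = attr.ib()  # newly added field -> the extra positional None above


empty = IpConfigSketch("static", [], None, [], None)
updated = attr.evolve(empty, address=[IPv4Interface("192.168.1.5/24")])
print(updated.ready)  # still None until connectivity has been checked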
@@ -1,14 +1,14 @@

function loadES5() {
var el = document.createElement('script');
el.src = '/api/hassio/app/frontend_es5/entrypoint.f8f83860.js';
el.src = '/api/hassio/app/frontend_es5/entrypoint.169d7fb4.js';
document.body.appendChild(el);
}
if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
loadES5();
} else {
try {
new Function("import('/api/hassio/app/frontend_latest/entrypoint.b6cf778b.js')")();
new Function("import('/api/hassio/app/frontend_latest/entrypoint.24687610.js')")();
} catch (err) {
loadES5();
}
[Prebuilt frontend panel assets regenerated: new hashed bundles were added under
supervisor/api/panel/frontend_es5/ (including 16aa06d8.js, 19929d68.js, 231b432f.js,
4006b363.js, 7ec6cac9.js, 991beab2.js, 9c862ee8.js, a63f43e4.js, c3ebf408.js,
entrypoint.169d7fb4.js) and supervisor/api/panel/frontend_latest/ (including 139ce644.js,
4e85eb44.js, 50e67b8b.js, 650fc94a.js, 7b9d33d3.js, 8bc74f44.js, d4621810.js), together
with their .gz and .map counterparts; source maps were renamed accordingly
(e.g. faa8eea5.js.map -> 50e67b8b.js.map, 1a587b90.js.map -> 8bc74f44.js.map).
Minified and binary file diffs are not shown. The frontend_es5 manifest now points at the
new entrypoint:]

@@ -1,3 +1,3 @@
{
"entrypoint.js": "/api/hassio/app/frontend_es5/entrypoint.f8f83860.js"
"entrypoint.js": "/api/hassio/app/frontend_es5/entrypoint.169d7fb4.js"
}