Compare commits


1 Commit

Author: ludeeus
SHA1: e415923553
Message: Trigger backup sync when backup is complete
Date: 2024-10-16 04:54:29 +00:00
4028 changed files with 503906 additions and 11309 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
"image": "ghcr.io/home-assistant/devcontainer:supervisor",
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
@@ -44,8 +44,5 @@
}
}
},
"mounts": [
"type=volume,target=/var/lib/docker",
"type=volume,target=/mnt/supervisor"
]
"mounts": ["type=volume,target=/var/lib/docker"]
}

View File

@@ -26,7 +26,7 @@ body:
attributes:
label: What type of installation are you running?
description: >
If you don't know, can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
It is listed as the `Installation Type` value.
options:
- Home Assistant OS
@@ -72,9 +72,9 @@ body:
validations:
required: true
attributes:
label: System information
label: System Health information
description: >
The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
Click the copy button at the bottom of the pop-up and paste it here.
[![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
@@ -83,9 +83,8 @@ body:
label: Supervisor diagnostics
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
and select 'Download diagnostics'.
Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.
**Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
- type: textarea

View File

@@ -33,7 +33,7 @@ on:
- setup.py
env:
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.12"
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
with:
fetch-depth: 0
@@ -92,7 +92,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
with:
fetch-depth: 0
@@ -106,9 +106,9 @@ jobs:
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2024.07.1
with:
abi: cp313
abi: cp312
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
@@ -125,13 +125,13 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.8.1
uses: sigstore/cosign-installer@v3.7.0
with:
cosign-release: "v2.4.0"
@@ -160,7 +160,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2025.02.0
uses: home-assistant/builder@2024.08.2
with:
args: |
$BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2025.02.0
uses: home-assistant/builder@2024.08.2
with:
args: |
--test \

View File

@@ -8,7 +8,7 @@ on:
pull_request: ~
env:
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.12"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
concurrency:
@@ -25,15 +25,15 @@ jobs:
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python
id: python
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -47,7 +47,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -67,15 +67,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -87,7 +87,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -110,15 +110,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -130,7 +130,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -153,7 +153,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +168,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -188,7 +188,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -212,15 +212,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -232,7 +232,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -256,15 +256,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -274,10 +274,6 @@ jobs:
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0
- name: Register pylint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pylint.json"
@@ -292,19 +288,19 @@ jobs:
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.8.1
uses: sigstore/cosign-installer@v3.7.0
with:
cosign-release: "v2.4.0"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -339,7 +335,7 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.4.3
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
@@ -351,15 +347,15 @@ jobs:
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.2
uses: actions/cache@v4.1.1
with:
path: venv
key: |
@@ -370,7 +366,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -378,4 +374,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5.4.0
uses: codecov/codecov-action@v4.6.0

View File

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
with:
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v6.1.0
uses: release-drafter/release-drafter@v6.0.0
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}

View File

@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.2.1
- name: Sentry Release
uses: getsentry/action-release@v1.10.4
uses: getsentry/action-release@v1.7.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

View File

@@ -9,7 +9,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.1.0
- uses: actions/stale@v9.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30

View File

@@ -1,79 +0,0 @@
name: Update frontend
on:
schedule: # once a day
- cron: "0 0 * * *"
workflow_dispatch:
jobs:
check-version:
runs-on: ubuntu-latest
outputs:
skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
current_version: ${{ steps.check_version.outputs.current_version }}
latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Get latest frontend release
id: latest_frontend_version
uses: abatilo/release-info-action@v1.3.3
with:
owner: home-assistant
repo: frontend
- name: Check if version is up to date
id: check_version
run: |
current_version="$(cat .ha-frontend-version)"
latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
echo "current_version=${current_version}" >> $GITHUB_OUTPUT
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
if [[ ! "$current_version" < "$latest_version" ]]; then
echo "Frontend version is up to date"
echo "skip=true" >> $GITHUB_OUTPUT
fi
- name: Check if there is no open PR with this version
if: steps.check_version.outputs.skip != 'true'
id: check_existing_pr
env:
GH_TOKEN: ${{ github.token }}
run: |
PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
if [[ "$PR" != "[]" ]]; then
echo "Skipping - There is already a PR open for version $LATEST_VERSION"
echo "skip=true" >> $GITHUB_OUTPUT
fi
create-pr:
runs-on: ubuntu-latest
needs: check-version
if: needs.check-version.outputs.skip != 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Clear www folder
run: |
rm -rf supervisor/api/panel/*
- name: Update version file
run: |
echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
- name: Download release assets
uses: robinraju/release-downloader@v1
with:
repository: 'home-assistant/frontend'
tag: ${{ needs.check-version.outputs.latest_version }}
fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
extract: true
out-file-path: supervisor/api/panel/
- name: Create PR
uses: peter-evans/create-pull-request@v7
with:
commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
branch: autoupdate-frontend
base: main
draft: true
sign-commits: true
title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
body: >
Update frontend from ${{ needs.check-version.outputs.current_version }} to
[${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})
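Note: the version check in this workflow compares tags with bash string comparison (`[[ ! "$current_version" < "$latest_version" ]]`), which is lexicographic. A minimal sketch of the pitfall, assuming date-based frontend tags like `20250221.0`; the `awesomeversion` comparison is a suggestion, not what the workflow uses (the package is pinned in `requirements.txt`):

```python
# Bash's [[ "$a" < "$b" ]] sorts lexicographically, like Python's str < str.
assert "20250221.0" < "20250222.0"  # zero-padded date tags sort correctly

# Lexicographic order breaks for general version strings:
assert "2024.9.0" > "2024.11.0"  # "9" > "1" as characters, wrong semantically

# A semantic comparison avoids the pitfall:
from awesomeversion import AwesomeVersion

assert AwesomeVersion("2024.11.0") > AwesomeVersion("2024.9.0")
```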

.gitmodules vendored Normal file
View File

@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
path = home-assistant-polymer
url = https://github.com/home-assistant/home-assistant-polymer
branch = dev

View File

@@ -1 +0,0 @@
20250221.0

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.1
rev: v0.5.7
hooks:
- id: ruff
args:
@@ -8,7 +8,7 @@ repos:
- id: ruff-format
files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v4.5.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]

View File

@@ -9,8 +9,7 @@ ENV \
ARG \
COSIGN_VERSION \
BUILD_ARCH \
QEMU_CPU
BUILD_ARCH
# Install base
WORKDIR /usr/src
@@ -29,23 +28,22 @@ RUN \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.6.1
&& pip3 install uv==0.2.21
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
setarch="linux32"; \
linux32 uv pip install --no-build -r requirements.txt; \
else \
setarch=""; \
uv pip install --no-build -r requirements.txt; \
fi \
&& ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
uv pip install --no-cache -e ./supervisor \
pip3 install -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io

View File

@@ -1,5 +1,5 @@
[build-system]
requires = ["setuptools~=75.8.0", "wheel~=0.45.0"]
requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
build-backend = "setuptools.build_meta"
[project]
@@ -12,7 +12,7 @@ authors = [
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
]
keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.13.0"
requires-python = ">=3.12.0"
[project.urls]
"Homepage" = "https://www.home-assistant.io/"
@@ -31,7 +31,7 @@ include-package-data = true
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.13"
py-version = "3.12"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
@@ -147,7 +147,7 @@ disable = [
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
"raise-missing-from", # TRY200
# "redefined-builtin", # A001, ruff is way more stricter, needs work
"try-except-raise", # TRY203
"try-except-raise", # TRY302
"unused-argument", # ARG001, we don't use it
"unused-format-string-argument", #F507
"unused-format-string-key", # F504
@@ -223,7 +223,6 @@ testpaths = ["tests"]
norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_default_fixture_loop_scope = "function"
asyncio_mode = "auto"
filterwarnings = [
"error",
@@ -290,7 +289,7 @@ lint.select = [
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY203", # Remove exception handler; error is immediately re-raised
"TRY302", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
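Note: the `TRY302` / `TRY203` change in this hunk is the same ruff rule under a renumbered code ("Remove exception handler; error is immediately re-raised", per the comments above). A minimal sketch of the shape it flags, with illustrative names:

```python
def read_config(path: str) -> str:
    try:
        with open(path, encoding="utf-8") as handle:
            return handle.read()
    except OSError:
        raise  # flagged: the handler only re-raises, so it can be removed
```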

View File

@@ -1,29 +1,29 @@
aiodns==3.2.0
aiohttp==3.11.13
aiohttp==3.10.10
atomicwrites-homeassistant==1.4.1
attrs==25.1.0
attrs==24.2.0
awesomeversion==24.6.0
brotli==1.1.0
ciso8601==2.3.2
colorlog==6.9.0
ciso8601==2.3.1
colorlog==6.8.2
cpe==1.3.1
cryptography==44.0.1
debugpy==1.8.12
cryptography==43.0.1
debugpy==1.8.7
deepmerge==2.0
dirhash==0.5.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.44
jinja2==3.1.5
orjson==3.10.12
pulsectl==24.12.0
gitpython==3.1.43
jinja2==3.1.4
orjson==3.10.7
pulsectl==24.8.0
pyudev==0.24.3
PyYAML==6.0.2
requests==2.32.3
securetar==2025.2.1
sentry-sdk==2.22.0
setuptools==75.8.2
securetar==2024.2.1
sentry-sdk==2.16.0
setuptools==75.1.0
voluptuous==0.15.2
dbus-fast==2.34.0
dbus-fast==2.24.3
typing_extensions==4.12.2
zlib-fast==0.2.1
zlib-fast==0.2.0

View File

@@ -1,13 +1,12 @@
astroid==3.3.8
coverage==7.6.12
pre-commit==4.1.0
pylint==3.3.4
pytest-aiohttp==1.1.0
pytest-asyncio==0.25.2
pytest-cov==6.0.0
coverage==7.6.3
pre-commit==4.0.1
pylint==3.3.1
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.6
pytest-cov==5.0.0
pytest-timeout==2.3.1
pytest==8.3.4
ruff==0.9.8
pytest==8.3.3
ruff==0.6.9
time-machine==2.16.0
typing_extensions==4.12.2
urllib3==2.3.0
urllib3==2.2.3

scripts/update-frontend.sh Executable file
View File

@@ -0,0 +1,30 @@
#!/bin/bash
source "/etc/supervisor_scripts/common"
set -e
# Update frontend
git submodule update --init --recursive --remote
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
cd home-assistant-polymer
nvm install
script/bootstrap
# Download translations
start_docker
./script/translations_download
# build frontend
cd hassio
./script/build_hassio
# Copy frontend
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/
# Reset frontend git
cd ..
git reset --hard HEAD
stop_docker

View File

@@ -19,7 +19,7 @@ def _get_supervisor_version():
for line in CONSTANTS.split("\n"):
if match := RE_SUPERVISOR_VERSION.match(line):
return match.group(1)
return "9999.09.9.dev9999"
return "99.9.9dev"
setup(
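Note: this hunk changes only the fallback version returned when the regex finds nothing. A self-contained sketch of the lookup, assuming `supervisor/const.py` contains a line like `SUPERVISOR_VERSION = "2024.10.1"`; the regex is a guess at what `RE_SUPERVISOR_VERSION` matches, and the path and function name are illustrative:

```python
import re
from pathlib import Path

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

def get_supervisor_version(const_file: Path = Path("supervisor/const.py")) -> str:
    """Return the pinned version, or a high dev placeholder when unset."""
    for line in const_file.read_text(encoding="utf-8").split("\n"):
        if match := RE_SUPERVISOR_VERSION.match(line):
            return match.group(1).strip('"')
    return "9999.09.9.dev9999"
```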

View File

@@ -54,7 +54,8 @@ if __name__ == "__main__":
loop.set_debug(coresys.config.debug)
loop.run_until_complete(coresys.core.connect())
loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
bootstrap.supervisor_debugger(coresys)
bootstrap.migrate_system_env(coresys)
# Signal health startup for container
run_os_startup_check_cleanup()

View File

@@ -6,7 +6,6 @@ from contextlib import suppress
from copy import deepcopy
from datetime import datetime
import errno
from functools import partial
from ipaddress import IPv4Address
import logging
from pathlib import Path, PurePath
@@ -20,7 +19,7 @@ from typing import Any, Final
import aiohttp
from awesomeversion import AwesomeVersionCompareException
from deepmerge import Merger
from securetar import AddFileError, atomic_contents_add, secure_path
from securetar import atomic_contents_add, secure_path
import voluptuous as vol
from voluptuous.humanize import humanize_error
@@ -82,13 +81,12 @@ from ..hardware.data import Device
from ..homeassistant.const import WSEvent, WSType
from ..jobs.const import JobExecutionLimit
from ..jobs.decorator import Job
from ..resolution.const import ContextType, IssueType, UnhealthyReason
from ..resolution.data import Issue
from ..resolution.const import UnhealthyReason
from ..store.addon import AddonStore
from ..utils import check_port
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from ..utils.sentry import async_capture_exception
from ..utils.sentry import capture_exception
from .const import (
WATCHDOG_MAX_ATTEMPTS,
WATCHDOG_RETRY_SECONDS,
@@ -146,27 +144,11 @@ class Addon(AddonModel):
self._listeners: list[EventListener] = []
self._startup_event = asyncio.Event()
self._startup_task: asyncio.Task | None = None
self._boot_failed_issue = Issue(
IssueType.BOOT_FAIL, ContextType.ADDON, reference=self.slug
)
self._device_access_missing_issue = Issue(
IssueType.DEVICE_ACCESS_MISSING, ContextType.ADDON, reference=self.slug
)
def __repr__(self) -> str:
"""Return internal representation."""
return f"<Addon: {self.slug}>"
@property
def boot_failed_issue(self) -> Issue:
"""Get issue used if start on boot failed."""
return self._boot_failed_issue
@property
def device_access_missing_issue(self) -> Issue:
"""Get issue used if device access is missing and can't be automatically added."""
return self._device_access_missing_issue
@property
def state(self) -> AddonState:
"""Return state of the add-on."""
@@ -184,20 +166,6 @@ class Addon(AddonModel):
if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
self._startup_event.set()
# Dismiss boot failed issue if present and we started
if (
new_state == AddonState.STARTED
and self.boot_failed_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self.boot_failed_issue)
# Dismiss device access missing issue if present and we stopped
if (
new_state == AddonState.STOPPED
and self.device_access_missing_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self.device_access_missing_issue)
self.sys_homeassistant.websocket.send_message(
{
ATTR_TYPE: WSType.SUPERVISOR_EVENT,
@@ -243,7 +211,7 @@ class Addon(AddonModel):
await self.instance.install(self.version, default_image, arch=self.arch)
self.persist[ATTR_IMAGE] = default_image
await self.save_persist()
self.save_persist()
@property
def ip_address(self) -> IPv4Address:
@@ -354,13 +322,6 @@ class Addon(AddonModel):
"""Store user boot options."""
self.persist[ATTR_BOOT] = value
# Dismiss boot failed issue if present and boot at start disabled
if (
value == AddonBoot.MANUAL
and self._boot_failed_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self._boot_failed_issue)
@property
def auto_update(self) -> bool:
"""Return if auto update is enable."""
@@ -667,9 +628,9 @@ class Addon(AddonModel):
"""Is add-on loaded."""
return bool(self._listeners)
async def save_persist(self) -> None:
def save_persist(self) -> None:
"""Save data of add-on."""
await self.sys_addons.data.save_data()
self.sys_addons.data.save_data()
async def watchdog_application(self) -> bool:
"""Return True if application is running."""
@@ -772,7 +733,7 @@ class Addon(AddonModel):
)
async def install(self) -> None:
"""Install and setup this addon."""
await self.sys_addons.data.install(self.addon_store)
self.sys_addons.data.install(self.addon_store)
await self.load()
if not self.path_data.is_dir():
@@ -790,7 +751,7 @@ class Addon(AddonModel):
self.latest_version, self.addon_store.image, arch=self.arch
)
except DockerError as err:
await self.sys_addons.data.uninstall(self)
self.sys_addons.data.uninstall(self)
raise AddonsError() from err
# Add to addon manager
@@ -839,23 +800,23 @@ class Addon(AddonModel):
# Cleanup Ingress dynamic port assignment
if self.with_ingress:
await self.sys_ingress.del_dynamic_port(self.slug)
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(self.slug)
# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != self.slug:
continue
await self.sys_discovery.remove(message)
self.sys_discovery.remove(message)
# Cleanup services data
for service in self.sys_services.list_services:
if self.slug not in service.active:
continue
await service.del_service_data(self)
service.del_service_data(self)
# Remove from addon manager
await self.sys_addons.data.uninstall(self)
self.sys_addons.data.uninstall(self)
self.sys_addons.local.pop(self.slug)
@Job(
@@ -884,7 +845,7 @@ class Addon(AddonModel):
try:
_LOGGER.info("Add-on '%s' successfully updated", self.slug)
await self.sys_addons.data.update(store)
self.sys_addons.data.update(store)
await self._check_ingress_port()
# Cleanup
@@ -925,7 +886,7 @@ class Addon(AddonModel):
except DockerError as err:
raise AddonsError() from err
await self.sys_addons.data.update(self.addon_store)
self.sys_addons.data.update(self.addon_store)
await self._check_ingress_port()
_LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
@@ -977,21 +938,11 @@ class Addon(AddonModel):
return
# Need install/update
tmp_folder: TemporaryDirectory | None = None
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
profile_file = Path(tmp_folder, "apparmor.txt")
def install_update_profile() -> Path:
nonlocal tmp_folder
tmp_folder = TemporaryDirectory(dir=self.sys_config.path_tmp)
profile_file = Path(tmp_folder.name, "apparmor.txt")
adjust_profile(self.slug, self.path_apparmor, profile_file)
return profile_file
try:
profile_file = await self.sys_run_in_executor(install_update_profile)
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
finally:
if tmp_folder:
await self.sys_run_in_executor(tmp_folder.cleanup)
async def uninstall_apparmor(self) -> None:
"""Remove AppArmor profile for Add-on."""
@@ -1063,7 +1014,7 @@ class Addon(AddonModel):
# Access Token
self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
await self.save_persist()
self.save_persist()
# Options
await self.write_options()
@@ -1218,25 +1169,6 @@ class Addon(AddonModel):
await self._backup_command(self.backup_post)
return None
def _is_excluded_by_filter(
self, origin_path: Path, arcname: str, item_arcpath: PurePath
) -> bool:
"""Filter out files from backup based on filters provided by addon developer.
This tests the dev provided filters against the full path of the file as
Supervisor sees them using match. This is done for legacy reasons, testing
against the relative path makes more sense and may be changed in the future.
"""
full_path = origin_path / item_arcpath.relative_to(arcname)
for exclude in self.backup_exclude:
if not full_path.match(exclude):
continue
_LOGGER.debug("Ignoring %s because of %s", full_path, exclude)
return True
return False
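Note: the `_is_excluded_by_filter` docstring above explains that, for legacy reasons, developer-supplied exclude patterns are matched against the full path rather than the archive-relative path. A runnable illustration with `pathlib` (all paths are hypothetical):

```python
from pathlib import Path, PurePath

origin_path = Path("/mnt/data/supervisor/addons/data/example")  # hypothetical
arcname = "data"
item_arcpath = PurePath("data/cache/db.sqlite")

full_path = origin_path / item_arcpath.relative_to(arcname)
# full_path == /mnt/data/supervisor/addons/data/example/cache/db.sqlite

# Path.match() anchors patterns at the right-hand side:
assert full_path.match("*.sqlite")
assert full_path.match("cache/*")
# A pattern rooted at the archive name does not match the full path:
assert not full_path.match("data/cache/*")
```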
@Job(
name="addon_backup",
limit=JobExecutionLimit.GROUP_ONCE,
@@ -1248,45 +1180,46 @@ class Addon(AddonModel):
Returns a Task that completes when addon has state 'started' (see start)
for cold backup. Else nothing is returned.
"""
wait_for_start: Awaitable[None] | None = None
def _addon_backup(
store_image: bool,
metadata: dict[str, Any],
apparmor_profile: str | None,
addon_config_used: bool,
):
"""Start the backup process."""
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
# store local image
if store_image:
try:
self.instance.export_image(temp_path.joinpath("image.tar"))
except DockerError as err:
raise AddonsError() from err
# Store local configs/state
# store local image
if self.need_build:
try:
write_json_file(temp_path.joinpath("addon.json"), metadata)
except ConfigurationFileError as err:
await self.instance.export_image(temp_path.joinpath("image.tar"))
except DockerError as err:
raise AddonsError() from err
data = {
ATTR_USER: self.persist,
ATTR_SYSTEM: self.data,
ATTR_VERSION: self.version,
ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
}
# Store local configs/state
try:
write_json_file(temp_path.joinpath("addon.json"), data)
except ConfigurationFileError as err:
raise AddonsError(
f"Can't save meta for {self.slug}", _LOGGER.error
) from err
# Store AppArmor Profile
if self.sys_host.apparmor.exists(self.slug):
profile = temp_path.joinpath("apparmor.txt")
try:
await self.sys_host.apparmor.backup_profile(self.slug, profile)
except HostAppArmorError as err:
raise AddonsError(
f"Can't save meta for {self.slug}", _LOGGER.error
"Can't backup AppArmor profile", _LOGGER.error
) from err
# Store AppArmor Profile
if apparmor_profile:
profile_backup_file = temp_path.joinpath("apparmor.txt")
try:
self.sys_host.apparmor.backup_profile(
apparmor_profile, profile_backup_file
)
except HostAppArmorError as err:
raise AddonsError(
"Can't backup AppArmor profile", _LOGGER.error
) from err
# Write tarfile
# write into tarfile
def _write_tarfile():
"""Write tar inside loop."""
with tar_file as backup:
# Backup metadata
backup.add(temp, arcname=".")
@@ -1295,56 +1228,32 @@ class Addon(AddonModel):
atomic_contents_add(
backup,
self.path_data,
file_filter=partial(
self._is_excluded_by_filter, self.path_data, "data"
),
excludes=self.backup_exclude,
arcname="data",
)
# Backup config
if addon_config_used:
if self.addon_config_used:
atomic_contents_add(
backup,
self.path_config,
file_filter=partial(
self._is_excluded_by_filter, self.path_config, "config"
),
excludes=self.backup_exclude,
arcname="config",
)
wait_for_start: Awaitable[None] | None = None
data = {
ATTR_USER: self.persist,
ATTR_SYSTEM: self.data,
ATTR_VERSION: self.version,
ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
}
apparmor_profile = (
self.slug if self.sys_host.apparmor.exists(self.slug) else None
)
was_running = await self.begin_backup()
try:
_LOGGER.info("Building backup for add-on %s", self.slug)
await self.sys_run_in_executor(
partial(
_addon_backup,
store_image=self.need_build,
metadata=data,
apparmor_profile=apparmor_profile,
addon_config_used=self.addon_config_used,
)
)
_LOGGER.info("Finish backup for addon %s", self.slug)
except (tarfile.TarError, OSError, AddFileError) as err:
raise AddonsError(
f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
) from err
finally:
if was_running:
wait_for_start = await self.end_backup()
is_running = await self.begin_backup()
try:
_LOGGER.info("Building backup for add-on %s", self.slug)
await self.sys_run_in_executor(_write_tarfile)
except (tarfile.TarError, OSError) as err:
raise AddonsError(
f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
) from err
finally:
if is_running:
wait_for_start = await self.end_backup()
_LOGGER.info("Finish backup for addon %s", self.slug)
return wait_for_start
@Job(
@@ -1359,36 +1268,30 @@ class Addon(AddonModel):
if addon is started after restore. Else nothing is returned.
"""
wait_for_start: Awaitable[None] | None = None
# Extract backup
def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
"""Extract tar backup."""
tmp = TemporaryDirectory(dir=self.sys_config.path_tmp)
try:
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
# extract backup
def _extract_tarfile():
"""Extract tar backup."""
with tar_file as backup:
backup.extractall(
path=tmp.name,
path=Path(temp),
members=secure_path(backup),
filter="fully_trusted",
)
data = read_json_file(Path(tmp.name, "addon.json"))
except:
tmp.cleanup()
raise
try:
await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
raise AddonsError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err
return tmp, data
# Read backup data
try:
data = read_json_file(Path(temp, "addon.json"))
except ConfigurationFileError as err:
raise AddonsError() from err
try:
tmp, data = await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
raise AddonsError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err
except ConfigurationFileError as err:
raise AddonsError() from err
try:
# Validate
try:
data = SCHEMA_ADDON_BACKUP(data)
@@ -1408,7 +1311,7 @@ class Addon(AddonModel):
# Restore local add-on information
_LOGGER.info("Restore config for addon %s", self.slug)
restore_image = self._image(data[ATTR_SYSTEM])
await self.sys_addons.data.restore(
self.sys_addons.data.restore(
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
)
@@ -1422,7 +1325,7 @@ class Addon(AddonModel):
if not await self.instance.exists():
_LOGGER.info("Restore/Install of image for addon %s", self.slug)
image_file = Path(tmp.name, "image.tar")
image_file = Path(temp, "image.tar")
if image_file.is_file():
with suppress(DockerError):
await self.instance.import_image(image_file)
@@ -1441,13 +1344,13 @@ class Addon(AddonModel):
# Restore data and config
def _restore_data():
"""Restore data and config."""
temp_data = Path(tmp.name, "data")
temp_data = Path(temp, "data")
if temp_data.is_dir():
shutil.copytree(temp_data, self.path_data, symlinks=True)
else:
self.path_data.mkdir()
temp_config = Path(tmp.name, "config")
temp_config = Path(temp, "config")
if temp_config.is_dir():
shutil.copytree(temp_config, self.path_config, symlinks=True)
elif self.addon_config_used:
@@ -1467,7 +1370,7 @@ class Addon(AddonModel):
) from err
# Restore AppArmor
profile_file = Path(tmp.name, "apparmor.txt")
profile_file = Path(temp, "apparmor.txt")
if profile_file.exists():
try:
await self.sys_host.apparmor.load_profile(
@@ -1475,8 +1378,7 @@ class Addon(AddonModel):
)
except HostAppArmorError as err:
_LOGGER.error(
"Can't restore AppArmor profile for add-on %s",
self.slug,
"Can't restore AppArmor profile for add-on %s", self.slug
)
raise AddonsError() from err
@@ -1488,8 +1390,7 @@ class Addon(AddonModel):
# Run add-on
if data[ATTR_STATE] == AddonState.STARTED:
wait_for_start = await self.start()
finally:
tmp.cleanup()
_LOGGER.info("Finished restore for add-on %s", self.slug)
return wait_for_start
@@ -1530,7 +1431,7 @@ class Addon(AddonModel):
except AddonsError as err:
attempts = attempts + 1
_LOGGER.error("Watchdog restart of addon %s failed!", self.name)
await async_capture_exception(err)
capture_exception(err)
else:
break

View File

@@ -34,29 +34,16 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
self.coresys: CoreSys = coresys
self.addon = addon
# Search for build file later in executor
super().__init__(None, SCHEMA_BUILD_CONFIG)
def _get_build_file(self) -> Path:
"""Get build file.
Must be run in executor.
"""
try:
return find_one_filetype(
build_file = find_one_filetype(
self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
)
except ConfigurationFileError:
return self.addon.path_location / "build.json"
build_file = self.addon.path_location / "build.json"
async def read_data(self) -> None:
"""Load data from file."""
if not self._file:
self._file = await self.sys_run_in_executor(self._get_build_file)
super().__init__(build_file, SCHEMA_BUILD_CONFIG)
await super().read_data()
async def save_data(self):
def save_data(self):
"""Ignore save function."""
raise RuntimeError()

View File

@@ -38,7 +38,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
"""Return local add-on data."""
return self._data[ATTR_SYSTEM]
async def install(self, addon: AddonStore) -> None:
def install(self, addon: AddonStore) -> None:
"""Set addon as installed."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug] = {
@@ -46,28 +46,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
ATTR_VERSION: addon.version,
ATTR_IMAGE: addon.image,
}
await self.save_data()
self.save_data()
async def uninstall(self, addon: Addon) -> None:
def uninstall(self, addon: Addon) -> None:
"""Set add-on as uninstalled."""
self.system.pop(addon.slug, None)
self.user.pop(addon.slug, None)
await self.save_data()
self.save_data()
async def update(self, addon: AddonStore) -> None:
def update(self, addon: AddonStore) -> None:
"""Update version of add-on."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug].update(
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
)
await self.save_data()
self.save_data()
async def restore(
self, slug: str, user: Config, system: Config, image: str
) -> None:
def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
"""Restore data to add-on."""
self.user[slug] = deepcopy(user)
self.system[slug] = deepcopy(system)
self.user[slug][ATTR_IMAGE] = image
await self.save_data()
self.save_data()

View File

@@ -5,25 +5,27 @@ from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Self, Union
from attr import evolve
from typing import Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HassioError,
HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils.sentry import async_capture_exception
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
@@ -74,11 +76,6 @@ class AddonManager(CoreSysAttributes):
return addon
return None
async def load_config(self) -> Self:
"""Load config in executor."""
await self.data.read_data()
return self
async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
@@ -121,14 +118,15 @@ class AddonManager(CoreSysAttributes):
try:
if start_task := await addon.start():
wait_boot.append(start_task)
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except HassioError:
self.sys_resolution.add_issue(
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
pass # These are already handled
else:
continue
@@ -137,19 +135,6 @@ class AddonManager(CoreSysAttributes):
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
# After waiting for startup, create an issue for boot addons that are error or unknown state
# Ignore stopped as single shot addons can be run at boot and this is successful exit
# Timeout waiting for startup is not a failure, addon is probably just slow
for addon in tasks:
if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
self.sys_resolution.add_issue(
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
@@ -170,7 +155,7 @@ class AddonManager(CoreSysAttributes):
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
await async_capture_exception(err)
capture_exception(err)
@Job(
name="addon_manager_install",
@@ -388,7 +373,7 @@ class AddonManager(CoreSysAttributes):
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
await async_capture_exception(err)
capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(

View File

@@ -47,7 +47,7 @@ from ..const import (
ATTR_JOURNALD,
ATTR_KERNEL_MODULES,
ATTR_LEGACY,
ATTR_LOCATION,
ATTR_LOCATON,
ATTR_MACHINE,
ATTR_MAP,
ATTR_NAME,
@@ -210,6 +210,18 @@ class AddonModel(JobGroup, ABC):
"""Return description of add-on."""
return self.data[ATTR_DESCRIPTON]
@property
def long_description(self) -> str | None:
"""Return README.md as long_description."""
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
return readme.read_text(encoding="utf-8")
@property
def repository(self) -> str:
"""Return repository of add-on."""
@@ -569,7 +581,7 @@ class AddonModel(JobGroup, ABC):
@property
def path_location(self) -> Path:
"""Return path to this add-on."""
return Path(self.data[ATTR_LOCATION])
return Path(self.data[ATTR_LOCATON])
@property
def path_icon(self) -> Path:
@@ -634,21 +646,6 @@ class AddonModel(JobGroup, ABC):
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
async def long_description(self) -> str | None:
"""Return README.md as long_description."""
def read_readme() -> str | None:
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
return readme.read_text(encoding="utf-8")
return await self.sys_run_in_executor(read_readme)
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""

View File

@@ -46,7 +46,6 @@ def rating_security(addon: AddonModel) -> int:
privilege in addon.privileged
for privilege in (
Capabilities.BPF,
Capabilities.CHECKPOINT_RESTORE,
Capabilities.DAC_READ_SEARCH,
Capabilities.NET_ADMIN,
Capabilities.NET_RAW,

View File

@@ -55,7 +55,7 @@ from ..const import (
ATTR_KERNEL_MODULES,
ATTR_LABELS,
ATTR_LEGACY,
ATTR_LOCATION,
ATTR_LOCATON,
ATTR_MACHINE,
ATTR_MAP,
ATTR_NAME,
@@ -483,7 +483,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
_migrate_addon_config(),
_SCHEMA_ADDON_CONFIG.extend(
{
vol.Required(ATTR_LOCATION): str,
vol.Required(ATTR_LOCATON): str,
vol.Required(ATTR_REPOSITORY): str,
vol.Required(ATTR_TRANSLATIONS, default=dict): {
str: SCHEMA_ADDON_TRANSLATIONS

View File

@@ -10,7 +10,7 @@ from aiohttp import web
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import async_capture_exception
from ..utils.sentry import capture_exception
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
@@ -412,8 +412,7 @@ class RestAPI(CoreSysAttributes):
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
await async_capture_exception(err)
kwargs.pop("follow", None) # Follow is not supported for Docker logs
capture_exception(err)
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(

View File

@@ -106,7 +106,6 @@ from ..exceptions import (
APIAddonNotInstalled,
APIError,
APIForbidden,
APINotFound,
PwnedError,
PwnedSecret,
)
@@ -162,7 +161,7 @@ class APIAddons(CoreSysAttributes):
addon = self.sys_addons.get(addon_slug)
if not addon:
raise APINotFound(f"Addon {addon_slug} does not exist")
raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed")
@@ -212,7 +211,7 @@ class APIAddons(CoreSysAttributes):
ATTR_HOSTNAME: addon.hostname,
ATTR_DNS: addon.dns,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: await addon.long_description(),
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage,
ATTR_REPOSITORY: addon.repository,
@@ -322,7 +321,7 @@ class APIAddons(CoreSysAttributes):
if ATTR_WATCHDOG in body:
addon.watchdog = body[ATTR_WATCHDOG]
await addon.save_persist()
addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
@@ -336,7 +335,7 @@ class APIAddons(CoreSysAttributes):
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
await addon.save_persist()
addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> None:
@@ -402,7 +401,7 @@ class APIAddons(CoreSysAttributes):
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]
await addon.save_persist()
addon.save_persist()
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:

View File

@@ -99,7 +99,7 @@ class APIAuth(CoreSysAttributes):
@api_process
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
await self.sys_auth.reset_data()
self.sys_auth.reset_data()
@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:

View File

@@ -1,11 +1,8 @@
"""Backups RESTful API."""
from __future__ import annotations
import asyncio
from collections.abc import Callable
import errno
from io import IOBase
import logging
from pathlib import Path
import re
@@ -15,10 +12,8 @@ from typing import Any
from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..backups.backup import Backup
from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
ATTR_ADDONS,
@@ -27,81 +22,44 @@ from ..const import (
ATTR_CONTENT,
ATTR_DATE,
ATTR_DAYS_UNTIL_STALE,
ATTR_EXTRA,
ATTR_FILENAME,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_JOB_ID,
ATTR_LOCATION,
ATTR_LOCATON,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PATH,
ATTR_PROTECTED,
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SIZE_BYTES,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT,
ATTR_TYPE,
ATTR_VERSION,
REQUEST_FROM,
BusEvent,
CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound
from ..exceptions import APIError
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import (
ATTR_ADDITIONAL_LOCATIONS,
ATTR_BACKGROUND,
ATTR_LOCATION_ATTRIBUTES,
ATTR_LOCATIONS,
CONTENT_TYPE_TAR,
)
from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
ALL_ADDONS_FLAG = "ALL"
LOCATION_LOCAL = ".local"
RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")
# Backwards compatible
# Remove: 2022.08
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
def _ensure_list(item: Any) -> list:
"""Ensure value is a list."""
if not isinstance(item, list):
return [item]
return item
def _convert_local_location(item: str | None) -> str | None:
"""Convert local location value."""
if item in {LOCATION_LOCAL, ""}:
return None
return item
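Note: the helpers above normalize the `location` field: scalars become lists, and the `.local` / empty-string aliases collapse to `None` (the local backup folder). A runnable sketch wiring them into the same voluptuous schema; the example inputs are illustrative:

```python
import voluptuous as vol

LOCATION_LOCAL = ".local"

def _ensure_list(item):
    """Ensure value is a list."""
    return item if isinstance(item, list) else [item]

def _convert_local_location(item):
    """Map '.local' and '' to None."""
    return None if item in {LOCATION_LOCAL, ""} else item

SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())

assert SCHEMA_LOCATION_LIST(".local") == [None]
assert SCHEMA_LOCATION_LIST(["my_mount", ""]) == ["my_mount", None]
```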
# pylint: disable=no-value-for-parameter
SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
SCHEMA_RESTORE_FULL = vol.Schema(
{
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
}
)
@@ -109,36 +67,40 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
}
)
SCHEMA_BACKUP_FULL = vol.Schema(
{
vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_EXTRA): dict,
}
)
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_ADDONS): vol.Or(
ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
}
)
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)
SCHEMA_FREEZE = vol.Schema(
{
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
}
)
class APIBackups(CoreSysAttributes):
@@ -148,19 +110,9 @@ class APIBackups(CoreSysAttributes):
"""Return backup, throw an exception if it doesn't exist."""
backup = self.sys_backups.get(request.match_info.get("slug"))
if not backup:
raise APINotFound("Backup does not exist")
raise APIError("Backup does not exist")
return backup
def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
"""Make location attributes dictionary."""
return {
loc if loc else LOCATION_LOCAL: {
ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED],
ATTR_SIZE_BYTES: backup.all_locations[loc][ATTR_SIZE_BYTES],
}
for loc in backup.locations
}
def _list_backups(self):
"""Return list of backups."""
return [
@@ -170,11 +122,8 @@ class APIBackups(CoreSysAttributes):
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_LOCATON: backup.location,
ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -183,7 +132,6 @@ class APIBackups(CoreSysAttributes):
},
}
for backup in self.sys_backups.list_backups
if backup.location != LOCATION_CLOUD_BACKUP
]
@api_process
@@ -213,7 +161,7 @@ class APIBackups(CoreSysAttributes):
if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]
await self.sys_backups.save_data()
self.sys_backups.save_data()
@api_process
async def reload(self, _):
@@ -243,53 +191,30 @@ class APIBackups(CoreSysAttributes):
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_LOCATON: backup.location,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
ATTR_EXTRA: backup.extra,
}
def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
"""Convert a single location to a mount if possible."""
if not location or location == LOCATION_CLOUD_BACKUP:
return location
mount = self.sys_mounts.get(location)
if mount.usage != MountUsage.BACKUP:
raise APIError(
f"Mount {mount.name} is not used for backups, cannot backup to there"
)
return mount
def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary."""
body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
return body
if not body.get(ATTR_LOCATON):
return body
def _validate_cloud_backup_location(
self, request: web.Request, location: list[str | None] | str | None
) -> None:
"""Cloud backup location is only available to Home Assistant."""
if not isinstance(location, list):
location = [location]
if (
LOCATION_CLOUD_BACKUP in location
and request.get(REQUEST_FROM) != self.sys_homeassistant
):
raise APIForbidden(
f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
raise APIError(
f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
)
return body
async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]:
@@ -309,42 +234,24 @@ class APIBackups(CoreSysAttributes):
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
)
try:
event_task = self.sys_create_task(event.wait())
_, pending = await asyncio.wait(
await asyncio.wait(
(
backup_task,
event_task,
self.sys_create_task(event.wait()),
),
return_when=asyncio.FIRST_COMPLETED,
)
# It seems backup returned early (error or something), make sure to cancel
# the event task to avoid "Task was destroyed but it is pending!" errors.
if event_task in pending:
event_task.cancel()
return (backup_task, job.uuid)
finally:
self.sys_bus.remove_listener(listener)
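Note: the comment in this hunk explains why the still-pending event task must be cancelled when the backup finishes first. A standalone reproduction of that `FIRST_COMPLETED` pattern (the sleep stands in for the backup job):

```python
import asyncio

async def main() -> None:
    event = asyncio.Event()
    backup_task = asyncio.create_task(asyncio.sleep(0.1))  # stand-in for the backup
    event_task = asyncio.create_task(event.wait())         # stand-in for the freeze signal

    _, pending = await asyncio.wait(
        (backup_task, event_task), return_when=asyncio.FIRST_COMPLETED
    )
    # The backup returned first, so cancel the waiter to avoid
    # "Task was destroyed but it is pending!" at shutdown.
    if event_task in pending:
        event_task.cancel()

asyncio.run(main())
```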
@api_process
async def backup_full(self, request: web.Request):
async def backup_full(self, request):
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **body
self.sys_backups.do_backup_full, **self._location_to_mount(body)
)
if background and not backup_task.done():
@@ -359,28 +266,12 @@ class APIBackups(CoreSysAttributes):
)
@api_process
async def backup_partial(self, request: web.Request):
async def backup_partial(self, request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
body[ATTR_ADDONS] = list(self.sys_addons.local)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **body
self.sys_backups.do_backup_partial, **self._location_to_mount(body)
)
if background and not backup_task.done():
@@ -395,13 +286,10 @@ class APIBackups(CoreSysAttributes):
)
@api_process
async def restore_full(self, request: web.Request):
async def restore_full(self, request):
"""Full restore of a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body
@@ -415,13 +303,10 @@ class APIBackups(CoreSysAttributes):
)
@api_process
async def restore_partial(self, request: web.Request):
async def restore_partial(self, request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body
@@ -435,136 +320,59 @@ class APIBackups(CoreSysAttributes):
)
@api_process
async def freeze(self, request: web.Request):
async def freeze(self, request):
"""Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body))
@api_process
async def thaw(self, request: web.Request):
async def thaw(self, request):
"""Begin thaw after manual freeze."""
await self.sys_backups.thaw_all()
@api_process
async def remove(self, request: web.Request):
async def remove(self, request):
"""Remove a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_REMOVE, request)
locations: list[LOCATION_TYPE] | None = None
return self.sys_backups.remove(backup)
if ATTR_LOCATION in body:
self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
else:
self._validate_cloud_backup_location(request, backup.location)
await self.sys_backups.remove(backup, locations=locations)
@api_process
async def download(self, request: web.Request):
async def download(self, request):
"""Download a backup file."""
backup = self._extract_slug(request)
# Query will give us '' for /backups, convert value to None
location = _convert_local_location(
request.query.get(ATTR_LOCATION, backup.location)
)
self._validate_cloud_backup_location(request, location)
if location not in backup.all_locations:
raise APIError(f"Backup {backup.slug} is not in location {location}")
_LOGGER.info("Downloading backup %s", backup.slug)
filename = backup.all_locations[location][ATTR_PATH]
# If the file is missing, return 404 and trigger reload of location
if not filename.is_file():
self.sys_create_task(self.sys_backups.reload(location))
return web.Response(status=404)
response = web.FileResponse(filename)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
download_filename = filename.name
if download_filename == f"{backup.slug}.tar":
download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={download_filename}"
f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
)
return response
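For context on the Content-Disposition change above: the newer code only slugifies the backup name when the stored file still carries its default {slug}.tar name. A sketch of that decision; the regex is an assumption for illustration, the real RE_SLUGIFY_NAME lives in the Supervisor sources:

import re

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")  # assumed pattern, not verified

def download_filename(slug: str, backup_name: str, stored_name: str) -> str:
    # Keep a custom on-disk filename as-is; otherwise derive a readable
    # name from the backup's title, e.g. "My backup" -> "My_backup.tar".
    if stored_name == f"{slug}.tar":
        return f"{RE_SLUGIFY_NAME.sub('_', backup_name)}.tar"
    return stored_name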
@api_process
async def upload(self, request: web.Request):
async def upload(self, request):
"""Upload a backup file."""
location: LOCATION_TYPE = None
locations: list[LOCATION_TYPE] | None = None
tmp_path = self.sys_config.path_tmp
if ATTR_LOCATION in request.query:
location_names: list[str] = request.query.getall(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
# Convert empty string to None if necessary
locations = [
self._location_to_mount(location)
if _convert_local_location(location)
else None
for location in location_names
]
location = locations.pop(0)
if location and location != LOCATION_CLOUD_BACKUP:
tmp_path = location.local_where
filename: str | None = None
if ATTR_FILENAME in request.query:
filename = request.query.get(ATTR_FILENAME)
try:
vol.Match(RE_BACKUP_FILENAME)(filename)
except vol.Invalid as ex:
raise APIError(humanize_error(filename, ex)) from None
temp_dir: TemporaryDirectory | None = None
backup_file_stream: IOBase | None = None
def open_backup_file() -> Path:
nonlocal temp_dir, backup_file_stream
temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
tar_file = Path(temp_dir.name, "backup.tar")
backup_file_stream = tar_file.open("wb")
return tar_file
def close_backup_file() -> None:
if backup_file_stream:
backup_file_stream.close()
if temp_dir:
temp_dir.cleanup()
try:
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, "backup.tar")
reader = await request.multipart()
contents = await reader.next()
tar_file = await self.sys_run_in_executor(open_backup_file)
while chunk := await contents.read_chunk(size=2**16):
await self.sys_run_in_executor(backup_file_stream.write, chunk)
try:
with tar_file.open("wb") as backup:
while True:
chunk = await contents.read_chunk()
if not chunk:
break
backup.write(chunk)
backup = await asyncio.shield(
self.sys_backups.import_backup(
tar_file,
filename,
location=location,
additional_locations=locations,
)
)
except OSError as err:
if err.errno == errno.EBADMSG and location in {
LOCATION_CLOUD_BACKUP,
None,
}:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err)
return False
except OSError as err:
if err.errno == errno.EBADMSG:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err)
return False
except asyncio.CancelledError:
return False
except asyncio.CancelledError:
return False
finally:
if temp_dir or backup:
await self.sys_run_in_executor(close_backup_file)
backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
if backup:
return {ATTR_SLUG: backup.slug}
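The reworked upload handler streams the multipart body in 64 KiB chunks and pushes the blocking file writes onto the executor instead of writing inline on the event loop. A runnable sketch of that loop, assuming only an async read_chunk source (the wiring to an aiohttp multipart part is hypothetical):

import asyncio
from pathlib import Path

async def save_upload(read_chunk, tar_file: Path, chunk_size: int = 2**16) -> None:
    """Stream chunks to disk without blocking the event loop.

    read_chunk is any async callable that returns b"" at EOF, e.g. the
    read_chunk method of an aiohttp multipart part.
    """
    loop = asyncio.get_running_loop()
    with tar_file.open("wb") as stream:
        while chunk := await read_chunk(chunk_size):
            # File writes are blocking; hand them to the default executor.
            await loop.run_in_executor(None, stream.write, chunk)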


@@ -12,7 +12,6 @@ CONTENT_TYPE_X_LOG = "text/x-log"
COOKIE_INGRESS = "ingress_session"
ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
@@ -43,12 +42,11 @@ ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOB_ID = "job_id"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_LOCATION_ATTRIBUTES = "location_attributes"
ATTR_LOCATIONS = "locations"
ATTR_MDNS = "mdns"
ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
@@ -70,7 +68,6 @@ ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users"
ATTR_USER_PATH = "user_path"
ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"


@@ -16,7 +16,7 @@ from ..const import (
AddonState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden, APINotFound
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ class APIDiscovery(CoreSysAttributes):
"""Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info.get("uuid"))
if not message:
raise APINotFound("Discovery message not found")
raise APIError("Discovery message not found")
return message
@api_process
@@ -83,7 +83,7 @@ class APIDiscovery(CoreSysAttributes):
)
# Process discovery message
message = await self.sys_discovery.send(addon, **body)
message = self.sys_discovery.send(addon, **body)
return {ATTR_UUID: message.uuid}
@@ -110,5 +110,5 @@ class APIDiscovery(CoreSysAttributes):
if message.addon != addon.slug:
raise APIForbidden("Can't remove discovery message")
await self.sys_discovery.remove(message)
self.sys_discovery.remove(message)
return True


@@ -78,7 +78,7 @@ class APICoreDNS(CoreSysAttributes):
if restart_required:
self.sys_create_task(self.sys_plugins.dns.restart())
await self.sys_plugins.dns.save_data()
self.sys_plugins.dns.save_data()
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
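A pattern repeated throughout this diff: save_data() became a coroutine, so every call site gained an await. A minimal sketch of why (the file write moves off the event loop); the class and field names here are illustrative, not Supervisor's:

import asyncio
import json
from pathlib import Path

class ConfigFile:
    def __init__(self, path: Path) -> None:
        self._path = path
        self.data: dict = {}

    async def save_data(self) -> None:
        # Writing the file is blocking, so run it in the default executor;
        # callers now simply `await config.save_data()`.
        await asyncio.get_running_loop().run_in_executor(
            None, self._path.write_text, json.dumps(self.data)
        )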


@@ -16,7 +16,6 @@ from ..const import (
ATTR_VERSION,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -53,17 +52,14 @@ class APIDocker(CoreSysAttributes):
for hostname, registry in body.items():
self.sys_docker.config.registries[hostname] = registry
await self.sys_docker.config.save_data()
self.sys_docker.config.save_data()
@api_process
async def remove_registry(self, request: web.Request):
"""Delete a docker registry."""
hostname = request.match_info.get(ATTR_HOSTNAME)
if hostname not in self.sys_docker.config.registries:
raise APINotFound(f"Hostname {hostname} does not exist in registries")
del self.sys_docker.config.registries[hostname]
await self.sys_docker.config.save_data()
self.sys_docker.config.save_data()
@api_process
async def info(self, request: web.Request):


@@ -149,7 +149,7 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_BACKUPS_EXCLUDE_DATABASE
]
await self.sys_homeassistant.save_data()
self.sys_homeassistant.save_data()
@api_process
async def stats(self, request: web.Request) -> dict[Any, str]:


@@ -4,7 +4,7 @@ import asyncio
from contextlib import suppress
import logging
from aiohttp import ClientConnectionResetError, web
from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol
from voluptuous.error import CoerceInvalid
@@ -98,10 +98,10 @@ class APIHost(CoreSysAttributes):
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: await self.sys_host.info.free_space(),
ATTR_DISK_TOTAL: await self.sys_host.info.total_space(),
ATTR_DISK_USED: await self.sys_host.info.used_space(),
ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(),
ATTR_DISK_FREE: self.sys_host.info.free_space,
ATTR_DISK_TOTAL: self.sys_host.info.total_space,
ATTR_DISK_USED: self.sys_host.info.used_space,
ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
ATTR_FEATURES: self.sys_host.features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
@@ -239,12 +239,12 @@ class APIHost(CoreSysAttributes):
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
elif RANGE in request.headers:
range_header = request.headers.get(RANGE)
else:
range_header = (
f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
)
async with self.sys_host.logs.journald_logs(
@@ -258,13 +258,9 @@ class APIHost(CoreSysAttributes):
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
headers_returned = True
# When client closes the connection while reading busy logs, we
# sometimes get this exception. It should be safe to ignore it.
with suppress(ClientConnectionResetError):
await response.write(line.encode("utf-8") + b"\n")
await response.write(line.encode("utf-8") + b"\n")
except ConnectionResetError as ex:
raise APIError(
"Connection reset when trying to fetch data from systemd-journald."

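The `entries=` comment in the hunk above is the systemd-journald gateway Range syntax: entries=<cursor>[[:num_skip]:num_entries]. A small sketch of how the header is derived from the requested line count and follow mode:

DEFAULT_LINES = 100

def journald_range(lines: int, follow: bool) -> str:
    # Walk back (lines - 1) entries from the end of the journal; leave
    # num_entries empty while following so the stream stays open.
    lines = max(2, lines)  # return 2 lines at minimum, as the handler does
    return f"entries=:-{lines - 1}:{'' if follow else lines}"

assert journald_range(10, follow=False) == "entries=:-9:10"
assert journald_range(10, follow=True) == "entries=:-9:"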

@@ -277,7 +277,6 @@ class APIIngress(CoreSysAttributes):
response.content_type = content_type
try:
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
async for data in result.content.iter_chunked(4096):
await response.write(data)
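The "X-Accel-Buffering: no" header added in this hunk and in the host-logs hunk asks intermediate proxies such as nginx not to buffer the response, so streamed chunks reach the client immediately. A minimal aiohttp sketch of the same idea:

from aiohttp import web

async def stream(request: web.Request) -> web.StreamResponse:
    response = web.StreamResponse()
    # Must be set before prepare(); tells nginx-style proxies not to buffer.
    response.headers["X-Accel-Buffering"] = "no"
    await response.prepare(request)
    await response.write(b"chunk\n")
    return response

app = web.Application()
app.router.add_get("/stream", stream)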


@@ -7,7 +7,7 @@ from aiohttp import web
import voluptuous as vol
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, JobNotFound
from ..exceptions import APIError
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
@@ -23,24 +23,10 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions."""
def _extract_job(self, request: web.Request) -> SupervisorJob:
"""Extract job from request or raise."""
try:
return self.sys_jobs.get_job(request.match_info.get("uuid"))
except JobNotFound:
raise APINotFound("Job does not exist") from None
def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
"""Return current job tree.
Jobs are added to cache as they are created so by default they are in oldest to newest.
This is correct ordering for child jobs as it makes logical sense to present those in
the order they occurred within the parent. For the list as a whole, sort from newest
to oldest as it's likely any client is most interested in the newer ones.
"""
# Initially sort oldest to newest so all child lists end up in correct order
"""Return current job tree."""
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in sorted(self.sys_jobs.jobs):
for job in self.sys_jobs.jobs:
if job.internal:
continue
@@ -49,15 +35,11 @@ class APIJobs(CoreSysAttributes):
else:
jobs_by_parent[job.parent_id].append(job)
# After parent-child organization, sort the root jobs only from newest to oldest
job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
[(job_list, start)]
if start
else [
(job_list, job)
for job in sorted(jobs_by_parent.get(None, []), reverse=True)
]
else [(job_list, job) for job in jobs_by_parent.get(None, [])]
)
while queue:
@@ -92,25 +74,25 @@ class APIJobs(CoreSysAttributes):
if ATTR_IGNORE_CONDITIONS in body:
self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]
await self.sys_jobs.save_data()
self.sys_jobs.save_data()
await self.sys_resolution.evaluate.evaluate_system()
@api_process
async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager."""
await self.sys_jobs.reset_data()
self.sys_jobs.reset_data()
@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID."""
job = self._extract_job(request)
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
return self._list_jobs(job)[0]
@api_process
async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job."""
job = self._extract_job(request)
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
if not job.done:
raise APIError(f"Job {job.uuid} is not done!")
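The removed docstring spells out the ordering contract: children stay oldest-to-newest inside their parent, while root jobs are presented newest-first. A compact illustration of just that rule; the dataclass stands in for SupervisorJob and sorting by creation time is an assumption of the sketch:

from dataclasses import dataclass, field

@dataclass(order=True)
class Job:
    created: int
    uuid: str = field(compare=False)
    parent_id: str | None = field(compare=False, default=None)

def ordered_roots(jobs: list[Job]) -> list[Job]:
    by_parent: dict[str | None, list[Job]] = {}
    for job in sorted(jobs):  # oldest -> newest keeps child lists in order
        by_parent.setdefault(job.parent_id, []).append(job)
    # Only the roots flip to newest -> oldest for presentation.
    return sorted(by_parent.get(None, []), reverse=True)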


@@ -7,11 +7,11 @@ import voluptuous as vol
from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound
from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS, ATTR_USER_PATH
from .const import ATTR_MOUNTS
from .utils import api_process, api_validate
SCHEMA_OPTIONS = vol.Schema(
@@ -24,13 +24,6 @@ SCHEMA_OPTIONS = vol.Schema(
class APIMounts(CoreSysAttributes):
"""Handle REST API for mounting options."""
def _extract_mount(self, request: web.Request) -> Mount:
"""Extract mount from request or raise."""
name = request.match_info.get("mount")
if name not in self.sys_mounts:
raise APINotFound(f"No mount exists with name {name}")
return self.sys_mounts.get(name)
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return MountManager info."""
@@ -39,13 +32,7 @@ class APIMounts(CoreSysAttributes):
if self.sys_mounts.default_backup_mount
else None,
ATTR_MOUNTS: [
mount.to_dict()
| {
ATTR_STATE: mount.state,
ATTR_USER_PATH: mount.container_where.as_posix()
if mount.container_where
else None,
}
mount.to_dict() | {ATTR_STATE: mount.state}
for mount in self.sys_mounts.mounts
],
}
@@ -66,7 +53,7 @@ class APIMounts(CoreSysAttributes):
else:
self.sys_mounts.default_backup_mount = mount
await self.sys_mounts.save_data()
self.sys_mounts.save_data()
@api_process
async def create_mount(self, request: web.Request) -> None:
@@ -87,18 +74,20 @@ class APIMounts(CoreSysAttributes):
if not self.sys_mounts.default_backup_mount:
self.sys_mounts.default_backup_mount = mount
await self.sys_mounts.save_data()
self.sys_mounts.save_data()
@api_process
async def update_mount(self, request: web.Request) -> None:
"""Update an existing mount in supervisor."""
current = self._extract_mount(request)
name = request.match_info.get("mount")
name_schema = vol.Schema(
{vol.Optional(ATTR_NAME, default=current.name): current.name},
extra=vol.ALLOW_EXTRA,
{vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
)
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
if name not in self.sys_mounts:
raise APIError(f"No mount exists with name {name}")
mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
@@ -110,26 +99,26 @@ class APIMounts(CoreSysAttributes):
elif self.sys_mounts.default_backup_mount == mount:
self.sys_mounts.default_backup_mount = None
await self.sys_mounts.save_data()
self.sys_mounts.save_data()
@api_process
async def delete_mount(self, request: web.Request) -> None:
"""Delete an existing mount in supervisor."""
current = self._extract_mount(request)
mount = await self.sys_mounts.remove_mount(current.name)
name = request.match_info.get("mount")
mount = await self.sys_mounts.remove_mount(name)
# If it was a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
await self.sys_mounts.save_data()
self.sys_mounts.save_data()
@api_process
async def reload_mount(self, request: web.Request) -> None:
"""Reload an existing mount in supervisor."""
mount = self._extract_mount(request)
await self.sys_mounts.reload_mount(mount.name)
name = request.match_info.get("mount")
await self.sys_mounts.reload_mount(name)
# If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
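Another recurring change in this diff: lookups that used to raise a generic APIError (HTTP 400) now go through small _extract_* helpers that raise APINotFound (HTTP 404). A sketch of the shape, with stand-in exception classes since only their names are visible here:

from aiohttp import web

class APIError(Exception):
    """Stand-in for supervisor.exceptions.APIError (HTTP 400)."""

class APINotFound(APIError):
    """Stand-in for supervisor.exceptions.APINotFound (HTTP 404)."""

def extract_mount(mounts: dict[str, object], request: web.Request) -> object:
    # Translate a missing resource into a 404 instead of a generic 400.
    name = request.match_info.get("mount")
    if name not in mounts:
        raise APINotFound(f"No mount exists with name {name}")
    return mounts[name]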


@@ -42,7 +42,7 @@ from ..const import (
DOCKER_NETWORK_MASK,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, HostNetworkNotFound
from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import (
AccessPoint,
Interface,
@@ -167,7 +167,7 @@ class APINetwork(CoreSysAttributes):
except HostNetworkNotFound:
pass
raise APINotFound(f"Interface {name} does not exist") from None
raise APIError(f"Interface {name} does not exist") from None
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:


@@ -169,7 +169,7 @@ class APIOS(CoreSysAttributes):
body[ATTR_SYSTEM_HEALTH_LED]
)
await self.sys_dbus.agent.board.green.save_data()
self.sys_dbus.agent.board.green.save_data()
@api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
@@ -196,7 +196,7 @@ class APIOS(CoreSysAttributes):
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
await self.sys_dbus.agent.board.yellow.save_data()
self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,


@@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([4-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[89]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.9ac99222ee42fbb3.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.85ccafe1fda9d9a5.js")}else d("/api/hassio/app/frontend_es5/entrypoint.85ccafe1fda9d9a5.js")}()
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()

File diff suppressed because one or more lines are too long


@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map


@@ -0,0 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long


@@ -1 +0,0 @@
{"version":3,"file":"1081.e647cbe586ff9dd0.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250221.0/src/components/ha-button-toggle-group.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250221.0/src/components/ha-selector/ha-selector-button-toggle.ts"],"names":["_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","queryAll","html","_t","_","this","buttons","map","button","iconPath","_t2","label","active","_handleClick","_t3","styleMap","width","fullWidth","length","dense","_this$_buttons","_buttons","forEach","async","updateComplete","shadowRoot","querySelector","style","margin","ev","currentTarget","fireEvent","static","css","_t4","LitElement","HaButtonToggleSelector","_this$selector$button","_this$selector$button2","_this$selector$button3","options","selector","button_toggle","option","translationKey","translation_key","localizeValue","localizedLabel","sort","a","b","caseInsensitiveStringCompare","hass","locale","language","toggleButtons","item","_valueChanged","_ev$detail","_this$value","stopPropagation","detail","target","disabled","undefined"],"mappings":"sXAWgCA,EAAAA,EAAAA,GAAA,EAD/BC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAkIvC,OAAAC,EAlID,cACgCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC7BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,UAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,UAAW,aAAcG,KAAMC,WAAUH,IAAA,YAAAC,KAAAA,GAAA,OAClC,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEvBC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,QAAAC,KAAAA,GAAA,OAAgB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEhDO,EAAAA,EAAAA,IAAS,eAAaJ,IAAA,WAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEvB,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,uBAELC,KAAKC,QAAQC,KAAKC,GAClBA,EAAOC,UACHP,EAAAA,EAAAA,IAAIQ,IAAAA,EAAAN,CAAA,2GACOI,EAAOG,MACRH,EAAOC,SACND,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,eAEhBX,EAAAA,EAAAA,IAAIY,IAAAA,EAAAV,CAAA,iHACMW,EAAAA,EAAAA,GAAS,CACfC,MAAOX,KAAKY,UACL,IAAMZ,KAAKC,QAAQY,OAAtB,IACA,YAGGb,KAAKc,MACLX,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,aACXL,EAAOG,SAKxB,GAAC,CAAAlB,KAAA,SAAAI,IAAA,UAAAC,MAED,WAAoB,IAAAsB,EAEL,QAAbA,EAAAf,KAAKgB,gBAAQ,IAAAD,GAAbA,EAAeE,SAAQC,gBACff,EAAOgB,eAEXhB,EAAOiB,WAAYC,cAAc,UACjCC,MAAMC,OAAS,GAAG,GAExB,GAAC,CAAAnC,KAAA,SAAAI,IAAA,eAAAC,MAED,SAAqB+B,GACnBxB,KAAKO,OAASiB,EAAGC,cAAchC,OAC/BiC,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAAEP,MAAOO,KAAKO,QACjD,GAAC,CAAAnB,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGC,IAAAA,EAAA9B,CAAA,u0CAzDoB+B,EAAAA,I,MCD5BC,GAAsBnD,EAAAA,EAAAA,GAAA,EADlCC,EAAAA,EAAAA,IAAc,+BAA4B,SAAAC,EAAAC,GA4F1C,OAAAC,EA5FD,cACmCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAChCC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAG9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAuC,EAAAC,EAAAC,EACjB,MAAMC,GACuB,QAA3BH,EAAAhC,KAAKoC,SAASC,qBAAa,IAAAL,GAAS,QAATA
,EAA3BA,EAA6BG,eAAO,IAAAH,OAAA,EAApCA,EAAsC9B,KAAKoC,GACvB,iBAAXA,EACFA,EACA,CAAE7C,MAAO6C,EAAQhC,MAAOgC,OAC1B,GAEDC,EAA4C,QAA9BN,EAAGjC,KAAKoC,SAASC,qBAAa,IAAAJ,OAAA,EAA3BA,EAA6BO,gBAEhDxC,KAAKyC,eAAiBF,GACxBJ,EAAQlB,SAASqB,IACf,MAAMI,EAAiB1C,KAAKyC,cAC1B,GAAGF,aAA0BD,EAAO7C,SAElCiD,IACFJ,EAAOhC,MAAQoC,EACjB,IAI2B,QAA/BR,EAAIlC,KAAKoC,SAASC,qBAAa,IAAAH,GAA3BA,EAA6BS,MAC/BR,EAAQQ,MAAK,CAACC,EAAGC,KACfC,EAAAA,EAAAA,GACEF,EAAEtC,MACFuC,EAAEvC,MACFN,KAAK+C,KAAKC,OAAOC,YAKvB,MAAMC,EAAgCf,EAAQjC,KAAKiD,IAAkB,CACnE7C,MAAO6C,EAAK7C,MACZb,MAAO0D,EAAK1D,UAGd,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,iHACPC,KAAKM,MAEM4C,EACDlD,KAAKP,MACEO,KAAKoD,cAG5B,GAAC,CAAAhE,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsB+B,GAAI,IAAA6B,EAAAC,EACxB9B,EAAG+B,kBAEH,MAAM9D,GAAiB,QAAT4D,EAAA7B,EAAGgC,cAAM,IAAAH,OAAA,EAATA,EAAW5D,QAAS+B,EAAGiC,OAAOhE,MACxCO,KAAK0D,eAAsBC,IAAVlE,GAAuBA,KAAqB,QAAhB6D,EAAMtD,KAAKP,aAAK,IAAA6D,EAAAA,EAAI,MAGrE5B,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAC/BP,MAAOA,GAEX,GAAC,CAAAL,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGvB,IAAAA,EAAAN,CAAA,wLA5EuB+B,EAAAA,G"}

File diff suppressed because one or more lines are too long


@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map


@@ -1 +0,0 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250221.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}


@@ -1,2 +0,0 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map


@@ -1 +0,0 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}

File diff suppressed because one or more lines are too long


@@ -1 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1295"],{21393:function(s,n,e){e.r(n)}}]);

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff