Compare commits

..

2 Commits

Author            SHA1        Message                            Date
Paulus Schoutsen  ec897081cd  Update supervisor/api/__init__.py  2022-01-20 14:58:00 -08:00
Paulus Schoutsen  839361133a  Add refresh updates API endpoint   2022-01-20 10:14:37 -08:00
543 changed files with 9972 additions and 18481 deletions


@@ -31,7 +31,6 @@ categories:
- title: ":arrow_up: Dependency Updates"
label: "dependencies"
collapse-after: 1
include-labels:
- "breaking-change"


@@ -33,7 +33,6 @@ on:
- setup.py
env:
DEFAULT_PYTHON: 3.9
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
WHEELS_TAG: 3.9-alpine3.14
@@ -50,7 +49,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
with:
fetch-depth: 0
@@ -85,7 +84,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
with:
fetch-depth: 0
@@ -110,14 +109,14 @@ jobs:
- name: Login to DockerHub
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v1.14.1
uses: docker/login-action@v1.12.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v1.14.1
uses: docker/login-action@v1.12.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -128,7 +127,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2022.03.1
uses: home-assistant/builder@2021.12.0
with:
args: |
$BUILD_ARGS \
@@ -139,43 +138,30 @@ jobs:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
codenotary:
name: CAS signature
name: CodeNotary signature
needs: init
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v3.1.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
id: dirhash
run: |
pip3 install dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "::set-output name=dirhash::${dir_hash}"
- name: Signing Source
- name: Signing image
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/codenotary@master
with:
source: hash://${{ steps.dirhash.outputs.dirhash }}
asset: supervisor-${{ needs.init.outputs.version }}
token: ${{ secrets.CAS_TOKEN }}
source: dir://${{ github.workspace }}
user: ${{ secrets.VCN_USER }}
password: ${{ secrets.VCN_PASSWORD }}
organisation: ${{ secrets.VCN_ORG }}
version:
name: Update version
@@ -184,7 +170,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -209,11 +195,11 @@ jobs:
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2022.03.1
uses: home-assistant/builder@2021.12.0
with:
args: |
--test \
@@ -260,13 +246,13 @@ jobs:
run: |
echo "Checking supervisor info"
test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
if [ "$test" != "ok" ];then
exit 1
fi
echo "Checking supervisor network info"
test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
if [ "$test" != "ok" ];then
exit 1
fi
@@ -274,19 +260,13 @@ jobs:
run: |
echo "Install Core SSH Add-on"
test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
if [ "$test" != "ok" ];then
exit 1
fi
echo "Start Core SSH Add-on"
test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
if [ "$test" != "ok" ];then
exit 1
fi
@@ -295,77 +275,19 @@ jobs:
run: |
echo "Enable Content-Trust"
test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
if [ "$test" != "ok" ];then
exit 1
fi
echo "Run supervisor health check"
test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
if [ "$test" != "ok" ];then
exit 1
fi
echo "Check supervisor unhealthy"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
if [ "$test" != "" ]; then
exit 1
fi
echo "Check supervisor supported"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unsupported[]')
if [[ "$test" =~ source_mods ]]; then
exit 1
fi
- name: Create full backup
id: backup
run: |
test=$(docker exec hassio_cli ha backups new --no-progress --raw-json)
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
- name: Uninstall SSH add-on
run: |
test=$(docker exec hassio_cli ha addons uninstall core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Restart supervisor
run: |
test=$(docker exec hassio_cli ha supervisor restart --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Wait for Supervisor to come up
run: |
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
ping="error"
while [ "$ping" != "ok" ]; do
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
sleep 5
done
- name: Restore SSH add-on from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --addons core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1
fi
- name: Restore SSL directory from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
if [ "$test" != "" ];then
exit 1
fi
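
The codenotary job in the workflow above swaps the base branch's CAS source signing, which hashes the Supervisor's Python sources with dirhash and signs hash://<digest>, for the older VCN dir:// signing. For reference, the removed "Install dirhash and calc hash" CLI step corresponds roughly to this standalone Python sketch (illustrative only; the dirhash package — pinned as dirhash==0.2.1 on the base branch in the requirements.txt diff below — and the sha256/"*.py" options come from the step above, everything else is assumed):

    # Sketch: compute the digest the removed CI step produces.
    # Assumes the dirhash package (pip install dirhash).
    from dirhash import dirhash

    # Equivalent of: dirhash "$GITHUB_WORKSPACE/supervisor" -a sha256 --match "*.py"
    digest = dirhash("supervisor", "sha256", match=["*.py"])
    print(f"hash://{digest}")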


@@ -10,7 +10,7 @@ on:
env:
DEFAULT_PYTHON: 3.9
PRE_COMMIT_HOME: ~/.cache/pre-commit
DEFAULT_CAS: v1.0.2
DEFAULT_VCN: v0.9.8
jobs:
# Separate job to pre-populate the base dependency cache
@@ -23,15 +23,15 @@ jobs:
name: Prepare Python ${{ matrix.python-version }} dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
with:
python-version: ${{ matrix.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -45,7 +45,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
@@ -64,15 +64,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -93,7 +93,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -108,15 +108,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -128,7 +128,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
@@ -152,15 +152,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -184,15 +184,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -204,7 +204,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
@@ -225,15 +225,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -245,7 +245,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
@@ -269,15 +269,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -301,15 +301,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -321,7 +321,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
@@ -345,19 +345,19 @@ jobs:
name: Run tests Python ${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ matrix.python-version }}
- name: Install CAS tools
uses: home-assistant/actions/helpers/cas@master
- name: Install VCN tools
uses: home-assistant/actions/helpers/vcn@master
with:
version: ${{ env.DEFAULT_CAS }}
vcn_version: ${{ env.DEFAULT_VCN }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -392,7 +392,7 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v3.0.0
uses: actions/upload-artifact@v2.3.1
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
@@ -403,15 +403,15 @@ jobs:
needs: pytest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v3.1.2
uses: actions/setup-python@v2.3.1
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.2
uses: actions/cache@v2.1.7
with:
path: venv
key: |
@@ -422,7 +422,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v3
uses: actions/download-artifact@v2
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -430,4 +430,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3.1.0
uses: codecov/codecov-action@v2.1.0


@@ -9,7 +9,7 @@ jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v3.0.0
- uses: dessant/lock-threads@v3
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"


@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
with:
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
echo "::set-output name=version::$datepre.$newpost"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v5.19.0
uses: release-drafter/release-drafter@v5
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}


@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
uses: actions/checkout@v2.4.0
- name: Sentry Release
uses: getsentry/action-release@v1.1.6
env:


@@ -9,7 +9,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v5.0.0
- uses: actions/stale@v4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60


@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 21.12b0
hooks:
- id: black
args:
@@ -28,7 +28,7 @@ repos:
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v2.32.0
rev: v2.31.0
hooks:
- id: pyupgrade
args: [--py39-plus]


@@ -5,12 +5,10 @@ ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost
ARG \
BUILD_ARCH \
CAS_VERSION
ARG BUILD_ARCH
WORKDIR /usr/src
# Install base
WORKDIR /usr/src
RUN \
set -x \
&& apk add --no-cache \
@@ -20,20 +18,7 @@ RUN \
libffi \
libpulse \
musl \
openssl \
&& apk add --no-cache --virtual .build-dependencies \
build-base \
go \
\
&& git clone -b "v${CAS_VERSION}" --depth 1 \
https://github.com/codenotary/cas \
&& cd cas \
&& make cas \
&& mv cas /usr/bin/cas \
\
&& apk del .build-dependencies \
&& rm -rf /root/go /root/.cache \
&& rm -rf /usr/src/cas
openssl
# Install requirements
COPY requirements.txt .


@@ -9,8 +9,6 @@ build_from:
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
args:
CAS_VERSION: 1.0.2
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor


@@ -1,25 +1,22 @@
aiodns==3.0.0
aiohttp==3.8.1
async_timeout==4.0.2
atomicwrites==1.4.0
attrs==21.4.0
awesomeversion==22.4.2
attrs==21.2.0
awesomeversion==22.1.0
brotli==1.0.9
cchardet==2.1.7
ciso8601==2.2.0
colorlog==6.6.0
cpe==1.2.1
cryptography==36.0.2
debugpy==1.6.0
cryptography==36.0.1
debugpy==1.5.1
deepmerge==1.0.1
dirhash==0.2.1
docker==5.0.3
gitpython==3.1.27
jinja2==3.1.2
pulsectl==22.3.2
pyudev==0.23.2
gitpython==3.1.26
jinja2==3.0.3
pulsectl==21.10.5
pyudev==0.22.0
ruamel.yaml==0.17.17
securetar==2022.2.0
sentry-sdk==1.5.10
voluptuous==0.13.1
sentry-sdk==1.5.2
voluptuous==0.12.2
dbus-next==0.2.3


@@ -1,14 +1,14 @@
black==22.3.0
black==21.12b0
codecov==2.1.12
coverage==6.3.2
coverage==6.2
flake8-docstrings==1.6.0
flake8==4.0.1
pre-commit==2.18.1
pre-commit==2.17.0
pydocstyle==6.1.1
pylint==2.13.7
pylint==2.12.2
pytest-aiohttp==0.3.0
pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 breaks pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
pytest-cov==3.0.0
pytest-timeout==2.1.0
pytest==7.1.2
pyupgrade==2.32.0
pytest-timeout==2.0.2
pytest==6.2.5
pyupgrade==2.31.0


@@ -3,6 +3,5 @@
# Start Supervisor service
# ==============================================================================
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"
export MALLOC_CONF="background_thread:true,metadata_thp:auto"
exec python3 -m supervisor


@@ -31,4 +31,4 @@ do
done
bashio::exit.nok "Watchdog detected issue with Supervisor - taking container down!"
basio::exit.nok "Watchdog detected issue with Supervisor - taking container down!"


@@ -1,4 +0,0 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
-----END PUBLIC KEY-----


@@ -1,8 +0,0 @@
{
"currentcontext": {
"LcHost": "cas.codenotary.com",
"LcPort": "443"
},
"schemaversion": 3,
"users": null
}


@@ -49,7 +49,6 @@ setup(
"supervisor.resolution.evaluations",
"supervisor.resolution.fixups",
"supervisor.resolution",
"supervisor.security",
"supervisor.services.modules",
"supervisor.services",
"supervisor.store",


@@ -178,7 +178,7 @@ class AddonManager(CoreSysAttributes):
await addon.install_apparmor()
try:
await addon.instance.install(store.version, store.image, arch=addon.arch)
await addon.instance.install(store.version, store.image)
except DockerError as err:
self.data.uninstall(addon)
raise AddonsError() from err


@@ -14,7 +14,6 @@ from typing import Any, Awaitable, Final, Optional
import aiohttp
from deepmerge import Merger
from securetar import atomic_contents_add, secure_path
import voluptuous as vol
from voluptuous.humanize import humanize_error
@@ -66,6 +65,7 @@ from ..homeassistant.const import WSEvent, WSType
from ..utils import check_port
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from ..utils.tar import atomic_contents_add, secure_path
from .const import AddonBackupMode
from .model import AddonModel, Data
from .options import AddonOptions
@@ -748,7 +748,8 @@ class Addon(AddonModel):
def _write_tarfile():
"""Write tar inside loop."""
with tar_file as backup:
# Backup metadata
# Backup system
backup.add(temp, arcname=".")
# Backup data
@@ -815,10 +816,12 @@ class Addon(AddonModel):
try:
data = SCHEMA_ADDON_BACKUP(data)
except vol.Invalid as err:
raise AddonsError(
f"Can't validate {self.slug}, backup data: {humanize_error(data, err)}",
_LOGGER.error,
) from err
_LOGGER.error(
"Can't validate %s, backup data: %s",
self.slug,
humanize_error(data, err),
)
raise AddonsError() from err
# If available
if not self._available(data[ATTR_SYSTEM]):
@@ -890,10 +893,3 @@ class Addon(AddonModel):
return await self.start()
_LOGGER.info("Finished restore for add-on %s", self.slug)
def check_trust(self) -> Awaitable[None]:
"""Calculate Addon docker content trust.
Return Coroutine.
"""
return self.instance.check_trust()


@@ -10,4 +10,3 @@ class AddonBackupMode(str, Enum):
ATTR_BACKUP = "backup"
ATTR_CODENOTARY = "codenotary"


@@ -79,7 +79,7 @@ from ..const import (
)
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import Capabilities
from .const import ATTR_BACKUP, ATTR_CODENOTARY
from .const import ATTR_BACKUP
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE, RE_VOLUME
@@ -503,14 +503,6 @@ class AddonModel(CoreSysAttributes, ABC):
"""Return list of supported machine."""
return self.data.get(ATTR_MACHINE, [])
@property
def arch(self) -> str:
"""Return architecture to use for the addon's image."""
if ATTR_IMAGE in self.data:
return self.sys_arch.match(self.data[ATTR_ARCH])
return self.sys_arch.default
@property
def image(self) -> Optional[str]:
"""Generate image name from data."""
@@ -586,16 +578,6 @@ class AddonModel(CoreSysAttributes, ABC):
"""Return True if the add-on accesses the system journal."""
return self.data[ATTR_JOURNALD]
@property
def signed(self) -> bool:
"""Return True if the image is signed."""
return ATTR_CODENOTARY in self.data
@property
def codenotary(self) -> Optional[str]:
"""Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY)
def __eq__(self, other):
"""Compaired add-on objects."""
if not isinstance(other, AddonModel):


@@ -16,10 +16,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def rating_security(addon: AddonModel) -> int:
"""Return 1-8 for security rating.
"""Return 1-6 for security rating.
1 = not secure
8 = high secure
6 = high secure
"""
rating = 5
@@ -35,10 +35,6 @@ def rating_security(addon: AddonModel) -> int:
elif addon.access_auth_api:
rating += 1
# Signed
if addon.signed:
rating += 1
# Privileged options
if (
any(
@@ -74,7 +70,7 @@ def rating_security(addon: AddonModel) -> int:
if addon.access_docker_api or addon.with_full_access:
rating = 1
return max(min(8, rating), 1)
return max(min(6, rating), 1)
async def remove_data(folder: Path) -> None:


@@ -110,7 +110,7 @@ from ..validate import (
uuid_match,
version_tag,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY
from .const import ATTR_BACKUP
from .options import RE_SCHEMA_ELEMENT
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -317,7 +317,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce(
AddonBackupMode
),
vol.Optional(ATTR_CODENOTARY): vol.Email(),
vol.Optional(ATTR_OPTIONS, default={}): dict,
vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
vol.Schema(


@@ -17,6 +17,7 @@ from .docker import APIDocker
from .hardware import APIHardware
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .jobs import APIJobs
from .middleware.security import SecurityMiddleware
@@ -26,7 +27,6 @@ from .observer import APIObserver
from .os import APIOS
from .proxy import APIProxy
from .resolution import APIResoulution
from .root import APIRoot
from .security import APISecurity
from .services import APIServices
from .store import APIStore
@@ -35,7 +35,7 @@ from .supervisor import APISupervisor
_LOGGER: logging.Logger = logging.getLogger(__name__)
MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
class RestAPI(CoreSysAttributes):
@@ -70,7 +70,7 @@ class RestAPI(CoreSysAttributes):
self._register_hardware()
self._register_homeassistant()
self._register_host()
self._register_root()
self._register_info()
self._register_ingress()
self._register_multicast()
self._register_network()
@@ -159,7 +159,6 @@ class RestAPI(CoreSysAttributes):
[
web.get("/security/info", api_security.info),
web.post("/security/options", api_security.options),
web.post("/security/integrity", api_security.integrity_check),
]
)
@@ -229,21 +228,12 @@ class RestAPI(CoreSysAttributes):
]
)
def _register_root(self) -> None:
"""Register root functions."""
api_root = APIRoot()
api_root.coresys = self.coresys
def _register_info(self) -> None:
"""Register info functions."""
api_info = APIInfo()
api_info.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_root.info)])
self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
self.webapp.add_routes(
[web.get("/available_updates", api_root.available_updates)]
)
# Remove 2023
self.webapp.add_routes(
[web.get("/supervisor/available_updates", api_root.available_updates)]
)
self.webapp.add_routes([web.get("/info", api_info.info)])
def _register_resolution(self) -> None:
"""Register info functions."""
@@ -294,6 +284,10 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get(
"/supervisor/available_updates", api_supervisor.available_updates
),
web.post("/refresh_updates", api_supervisor.reload),
web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats),


@@ -106,7 +106,6 @@ from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
from ..exceptions import APIError, APIForbidden, PwnedError, PwnedSecret
from ..validate import docker_ports
from .const import ATTR_SIGNED
from .utils import api_process, api_process_raw, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -270,7 +269,6 @@ class APIAddons(CoreSysAttributes):
ATTR_IP_ADDRESS: None,
ATTR_TRANSLATIONS: addon.translations,
ATTR_INGRESS: addon.with_ingress,
ATTR_SIGNED: addon.signed,
ATTR_INGRESS_ENTRY: None,
ATTR_INGRESS_URL: None,
ATTR_INGRESS_PORT: None,


@@ -9,11 +9,10 @@ from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT
from ..backups.validate import ALL_FOLDERS
from ..const import (
ATTR_ADDONS,
ATTR_BACKUPS,
ATTR_COMPRESSED,
ATTR_CONTENT,
ATTR_DATE,
ATTR_FOLDERS,
@@ -36,16 +35,13 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
# Backwards compatible / Remove 2022.08
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema(
{
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
}
)
@@ -55,14 +51,13 @@ SCHEMA_BACKUP_FULL = vol.Schema(
{
vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
}
)
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
}
)
@@ -91,7 +86,6 @@ class APIBackups(CoreSysAttributes):
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_PROTECTED: backup.protected,
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_ADDONS: backup.addon_list,
@@ -134,7 +128,6 @@ class APIBackups(CoreSysAttributes):
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_SIZE: backup.size,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_ADDONS: data_addons,
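
The backups API above validates request bodies with voluptuous schemas built from optional keys and list validators; a standalone sketch of that validation pattern (keys and payload are illustrative, not the Supervisor's exact schema):

    # Standalone voluptuous sketch mirroring the schema style above.
    # Keys and payload are illustrative only.
    import voluptuous as vol

    SCHEMA_BACKUP_PARTIAL = vol.Schema({
        vol.Optional("name"): str,
        vol.Optional("password"): vol.Maybe(str),
        vol.Optional("addons"): vol.All([str], vol.Unique()),
    })

    body = SCHEMA_BACKUP_PARTIAL({"name": "nightly", "addons": ["core_ssh"]})
    print(body)  # an invalid payload raises vol.Invalid instead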


@@ -1,22 +1,15 @@
"""Const for API."""
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_AGENT_VERSION = "agent_version"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
ATTR_BROADCAST_MDNS = "broadcast_mdns"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
ATTR_DT_UTC = "dt_utc"
ATTR_FALLBACK = "fallback"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_MDNS = "mdns"
ATTR_PANEL_PATH = "panel_path"
ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USE_NTP = "use_ntp"
ATTR_USE_RTC = "use_rtc"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_PANEL_PATH = "panel_path"
ATTR_UPDATE_TYPE = "update_type"
ATTR_AVAILABLE_UPDATES = "available_updates"


@@ -26,18 +26,12 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SERVERS): dns_server_list,
vol.Optional(ATTR_FALLBACK): vol.Boolean(),
}
)
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
@@ -55,26 +49,15 @@ class APICoreDNS(CoreSysAttributes):
ATTR_HOST: str(self.sys_docker.network.dns),
ATTR_SERVERS: self.sys_plugins.dns.servers,
ATTR_LOCALS: self.sys_plugins.dns.locals,
ATTR_MDNS: self.sys_plugins.dns.mdns,
ATTR_LLMNR: self.sys_plugins.dns.llmnr,
ATTR_FALLBACK: self.sys_plugins.dns.fallback,
}
@api_process
async def options(self, request: web.Request) -> None:
"""Set DNS options."""
body = await api_validate(SCHEMA_OPTIONS, request)
restart_required = False
if ATTR_SERVERS in body:
self.sys_plugins.dns.servers = body[ATTR_SERVERS]
restart_required = True
if ATTR_FALLBACK in body:
self.sys_plugins.dns.fallback = body[ATTR_FALLBACK]
restart_required = True
if restart_required:
self.sys_create_task(self.sys_plugins.dns.restart())
self.sys_plugins.dns.save_data()


@@ -29,11 +29,8 @@ from .const import (
ATTR_AGENT_VERSION,
ATTR_APPARMOR_VERSION,
ATTR_BOOT_TIMESTAMP,
ATTR_BROADCAST_LLMNR,
ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC,
ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME,
ATTR_USE_NTP,
ATTR_USE_RTC,
@@ -63,7 +60,6 @@ class APIHost(CoreSysAttributes):
ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
ATTR_FEATURES: self.sys_host.features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
ATTR_KERNEL: self.sys_host.info.kernel,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_TIMEZONE: self.sys_host.info.timezone,
@@ -73,8 +69,6 @@ class APIHost(CoreSysAttributes):
ATTR_USE_RTC: self.sys_host.info.use_rtc,
ATTR_STARTUP_TIME: self.sys_host.info.startup_time,
ATTR_BOOT_TIMESTAMP: self.sys_host.info.boot_timestamp,
ATTR_BROADCAST_LLMNR: self.sys_host.info.broadcast_llmnr,
ATTR_BROADCAST_MDNS: self.sys_host.info.broadcast_mdns,
}
@api_process

supervisor/api/info.py (new file, 52 lines)

@@ -0,0 +1,52 @@
"""Init file for Supervisor info RESTful API."""
import logging
from typing import Any
from aiohttp import web
from ..const import (
ATTR_ARCH,
ATTR_CHANNEL,
ATTR_DOCKER,
ATTR_FEATURES,
ATTR_HASSOS,
ATTR_HOMEASSISTANT,
ATTR_HOSTNAME,
ATTR_LOGGING,
ATTR_MACHINE,
ATTR_OPERATING_SYSTEM,
ATTR_STATE,
ATTR_SUPERVISOR,
ATTR_SUPPORTED,
ATTR_SUPPORTED_ARCH,
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from .utils import api_process
_LOGGER: logging.Logger = logging.getLogger(__name__)
class APIInfo(CoreSysAttributes):
"""Handle RESTful API for info functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Show system info."""
return {
ATTR_SUPERVISOR: self.sys_supervisor.version,
ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
ATTR_HASSOS: self.sys_os.version,
ATTR_DOCKER: self.sys_docker.info.version,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_FEATURES: self.sys_host.features,
ATTR_MACHINE: self.sys_machine,
ATTR_ARCH: self.sys_arch.default,
ATTR_STATE: self.sys_core.state,
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
ATTR_SUPPORTED: self.sys_core.supported,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_LOGGING: self.sys_config.logging,
ATTR_TIMEZONE: self.sys_timezone,
}
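
The new supervisor/api/info.py above serves GET /info through the api_process decorator, which wraps responses in the same {"result": ..., "data": ...} envelope that the jq '.result' checks in the workflow diff above rely on. A short client-side sketch of calling it (host and token are placeholders; from inside an add-on the Supervisor is normally reached as http://supervisor with a bearer token):

    # Sketch: querying the /info endpoint added above.
    # Host and token are placeholders.
    import asyncio
    import aiohttp

    async def fetch_info(host: str = "http://supervisor",
                         token: str = "<SUPERVISOR_TOKEN>") -> dict:
        async with aiohttp.ClientSession(
            headers={"Authorization": f"Bearer {token}"}
        ) as session:
            async with session.get(f"{host}/info") as resp:
                return await resp.json()

    # asyncio.run(fetch_info())  # -> {"result": "ok", "data": {"supervisor": ..., ...}}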


@@ -1,14 +1,14 @@
function loadES5() {
var el = document.createElement('script');
el.src = '/api/hassio/app/frontend_es5/entrypoint.ed292e94.js';
el.src = '/api/hassio/app/frontend_es5/entrypoint.5d40ff8b.js';
document.body.appendChild(el);
}
if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
loadES5();
} else {
try {
new Function("import('/api/hassio/app/frontend_latest/entrypoint.af342c20.js')")();
new Function("import('/api/hassio/app/frontend_latest/entrypoint.f09e9f8e.js')")();
} catch (err) {
loadES5();
}

The remaining changed files are regenerated frontend panel assets: minified entrypoint and web-worker bundles, third-party license banner files (Polymer, Vaadin, Google, Leaflet), and gzipped/binary copies of the bundles. Their diffs are suppressed because the lines are too long or the files are binary, and some files were not shown because too many files changed in this diff.