Compare commits


1 Commit

Author: Mike Degatano
SHA1: b7c53d9e40
Message: No update available if update cannot be installed on system
Date: 2024-06-12 15:58:30 -04:00
3507 changed files with 503205 additions and 7510 deletions

View File

@@ -4,12 +4,8 @@
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup",
"postStartCommand": "bash devcontainer_bootstrap",
"postCreateCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
@@ -23,21 +19,17 @@
"GitHub.vscode-pull-request-github"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}

View File

@@ -38,7 +38,6 @@
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:
## Checklist
@@ -56,11 +55,9 @@
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] Tests have been added to verify that the new code works.
If API endpoints or add-on configuration are added/changed:
If API endpoints of add-on configuration are added/changed:
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)
<!--
Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/

View File

@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
with:
fetch-depth: 0
@@ -92,7 +92,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
with:
fetch-depth: 0
@@ -106,7 +106,7 @@ jobs:
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: cp312
tag: musllinux_1_2
@@ -125,15 +125,15 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.4.0"
cosign-release: "v2.2.3"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
@@ -149,7 +149,7 @@ jobs:
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.03.5
with:
args: |
$BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.03.5
with:
args: |
--test \

View File

@@ -25,15 +25,15 @@ jobs:
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python
id: python
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -47,7 +47,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -67,15 +67,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -87,7 +87,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -110,15 +110,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -130,7 +130,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -153,7 +153,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +168,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -188,7 +188,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -212,15 +212,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -232,7 +232,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -256,15 +256,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -288,19 +288,19 @@ jobs:
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.4.0"
cosign-release: "v2.2.3"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -313,7 +313,7 @@ jobs:
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -335,11 +335,10 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v4.3.3
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
include-hidden-files: true
coverage:
name: Process test coverage
@@ -347,15 +346,15 @@ jobs:
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.2
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -366,7 +365,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.7
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -374,4 +373,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v4.4.1

View File

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
with:
fetch-depth: 0

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.6
- name: Sentry Release
uses: getsentry/action-release@v1.7.0
env:

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.7
rev: v0.2.1
hooks:
- id: ruff
args:

View File

@@ -4,8 +4,7 @@ FROM ${BUILD_FROM}
ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
UV_SYSTEM_PYTHON=true
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
ARG \
COSIGN_VERSION \
@@ -27,17 +26,14 @@ RUN \
yaml \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.2.21
&& chmod a+x /usr/bin/cosign
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
linux32 uv pip install --no-build -r requirements.txt; \
else \
uv pip install --no-build -r requirements.txt; \
fi \
export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --only-binary=:all: \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor

View File

@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:
[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.19
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.19
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.19
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.19
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.19
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.4.0
COSIGN_VERSION: 2.2.3
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor

View File

@@ -31,7 +31,7 @@ include-package-data = true
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.12"
py-version = "3.11"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
@@ -215,9 +215,6 @@ expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.DESIGN]
max-positional-arguments = 10
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
@@ -231,13 +228,12 @@ filterwarnings = [
]
[tool.ruff]
lint.select = [
select = [
"B002", # Python does not support the unary prefix increment
"B007", # Loop control variable {name} not used within loop body
"B014", # Exception handler with duplicate exception
"B023", # Function definition does not bind loop variable {name}
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
"B904", # Use raise from to specify exception cause
"C", # complexity
"COM818", # Trailing comma on bare tuple prohibited
"D", # docstrings
@@ -251,6 +247,7 @@ lint.select = [
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH001", # No builtin eval() allowed
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
@@ -289,12 +286,13 @@ lint.select = [
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY200", # Use raise from to specify exception cause
"TRY302", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
lint.ignore = [
ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
@@ -341,16 +339,16 @@ lint.ignore = [
"PLE0605",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]
[tool.ruff.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.lint.flake8-pytest-style]
[tool.ruff.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.lint.flake8-tidy-imports.banned-api]
[tool.ruff.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.lint.isort]
[tool.ruff.isort]
force-sort-within-sections = true
section-order = [
"future",
@@ -364,10 +362,10 @@ known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.lint.per-file-ignores]
[tool.ruff.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.lint.mccabe]
[tool.ruff.mccabe]
max-complexity = 25

View File

@@ -1,29 +1,30 @@
aiodns==3.2.0
aiohttp==3.10.10
aiohttp==3.9.5
aiohttp-fast-url-dispatcher==0.3.0
atomicwrites-homeassistant==1.4.1
attrs==24.2.0
awesomeversion==24.6.0
attrs==23.2.0
awesomeversion==24.2.0
brotli==1.1.0
ciso8601==2.3.1
colorlog==6.9.0
cpe==1.3.1
cryptography==43.0.3
debugpy==1.8.7
deepmerge==2.0
dirhash==0.5.0
colorlog==6.8.2
cpe==1.2.1
cryptography==42.0.8
debugpy==1.8.1
deepmerge==1.1.1
dirhash==0.4.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.43
jinja2==3.1.4
orjson==3.10.7
pulsectl==24.11.0
orjson==3.9.15
pulsectl==24.4.0
pyudev==0.24.3
PyYAML==6.0.2
PyYAML==6.0.1
requests==2.32.3
securetar==2024.2.1
sentry-sdk==2.18.0
setuptools==75.3.0
voluptuous==0.15.2
dbus-fast==2.24.3
sentry-sdk==2.5.1
setuptools==70.0.0
voluptuous==0.14.2
dbus-fast==2.21.3
typing_extensions==4.12.2
zlib-fast==0.2.0

View File

@@ -1,12 +1,12 @@
coverage==7.6.4
pre-commit==4.0.1
pylint==3.3.1
coverage==7.5.3
pre-commit==3.7.1
pylint==3.2.3
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.6
pytest-cov==6.0.0
pytest-cov==5.0.0
pytest-timeout==2.3.1
pytest==8.3.3
ruff==0.7.2
time-machine==2.16.0
pytest==8.2.2
ruff==0.4.8
time-machine==2.14.1
typing_extensions==4.12.2
urllib3==2.2.3
urllib3==2.2.1

View File

@@ -12,6 +12,7 @@ nvm install
script/bootstrap
# Download translations
start_docker
./script/translations_download
# build frontend
@@ -24,4 +25,6 @@ cp -rf build/* ../../supervisor/api/panel/
# Reset frontend git
cd ..
git reset --hard HEAD
git reset --hard HEAD
stop_docker

View File

@@ -1,5 +1,4 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re

View File

@@ -1,5 +1,4 @@
"""Main file for Supervisor."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-ons."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
@@ -47,8 +46,6 @@ from ..const import (
ATTR_SLUG,
ATTR_STATE,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TYPE,
ATTR_USER,
ATTR_UUID,
@@ -57,7 +54,6 @@ from ..const import (
ATTR_WATCHDOG,
DNS_SUFFIX,
AddonBoot,
AddonBootConfig,
AddonStartup,
AddonState,
BusEvent,
@@ -289,9 +285,13 @@ class Addon(AddonModel):
@property
def need_update(self) -> bool:
"""Return True if an update is available."""
if self.is_detached:
if self.is_detached or self.version == self.latest_version:
return False
return self.version != self.latest_version
with suppress(AddonsNotSupportedError):
self._validate_availability(self.data_store)
return True
return False
@property
def dns(self) -> list[str]:
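
This hunk contrasts two versions of the need_update property: one simply compares the installed and latest versions, while the other additionally checks that the newer version can actually be installed on this system before reporting an update, which is what the commit message describes. Below is a minimal standalone sketch of that availability-gated variant; the Addon class, the _validate_availability stub, and the exception are simplified stand-ins rather than the Supervisor's real implementations.

from contextlib import suppress


class AddonsNotSupportedError(Exception):
    """Raised when an add-on build cannot be installed on this system."""


class Addon:
    """Reduced add-on model illustrating the availability-gated update check."""

    def __init__(self, version: str, latest_version: str, is_detached: bool = False):
        self.version = version
        self.latest_version = latest_version
        self.is_detached = is_detached
        # Stand-in for the store metadata describing the latest version.
        self.data_store: dict = {}

    def _validate_availability(self, config: dict) -> None:
        # Stand-in: the real check validates architecture, machine type and
        # the required Home Assistant version.
        if config.get("unsupported"):
            raise AddonsNotSupportedError()

    @property
    def need_update(self) -> bool:
        """Return True only if a newer version exists and it is installable."""
        if self.is_detached or self.version == self.latest_version:
            return False

        # Only report an update when the target version is installable here.
        with suppress(AddonsNotSupportedError):
            self._validate_availability(self.data_store)
            return True
        return False


# An update exists but is unsupported on this system, so no update is shown.
addon = Addon(version="1.0.0", latest_version="1.1.0")
addon.data_store["unsupported"] = True
assert addon.need_update is False
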
@@ -312,9 +312,7 @@ class Addon(AddonModel):
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
if self.boot_config == AddonBootConfig.MANUAL_ONLY:
return super().boot
"""Return boot config with prio local settings."""
return self.persist.get(ATTR_BOOT, super().boot)
@boot.setter
@@ -369,37 +367,6 @@ class Addon(AddonModel):
else:
self.persist[ATTR_WATCHDOG] = value
@property
def system_managed(self) -> bool:
"""Return True if addon is managed by Home Assistant."""
return self.persist[ATTR_SYSTEM_MANAGED]
@system_managed.setter
def system_managed(self, value: bool) -> None:
"""Set system managed enable/disable."""
if not value and self.system_managed_config_entry:
self.system_managed_config_entry = None
self.persist[ATTR_SYSTEM_MANAGED] = value
@property
def system_managed_config_entry(self) -> str | None:
"""Return id of config entry managing this addon (if any)."""
if not self.system_managed:
return None
return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY)
@system_managed_config_entry.setter
def system_managed_config_entry(self, value: str | None) -> None:
"""Set ID of config entry managing this addon."""
if not self.system_managed:
_LOGGER.warning(
"Ignoring system managed config entry for %s because it is not system managed",
self.slug,
)
else:
self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value
@property
def uuid(self) -> str:
"""Return an API token for this add-on."""
@@ -766,12 +733,10 @@ class Addon(AddonModel):
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def uninstall(
self, *, remove_config: bool, remove_image: bool = True
) -> None:
async def uninstall(self, *, remove_config: bool) -> None:
"""Uninstall and cleanup this addon."""
try:
await self.instance.remove(remove_image=remove_image)
await self.instance.remove()
except DockerError as err:
raise AddonsError() from err

View File

@@ -1,5 +1,4 @@
"""Supervisor add-on build environment."""
from __future__ import annotations
from functools import cached_property

View File

@@ -1,5 +1,4 @@
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on data."""
from copy import deepcopy
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
@@ -185,15 +184,7 @@ class AddonManager(CoreSysAttributes):
_LOGGER.warning("Add-on %s is not installed", slug)
return
shared_image = any(
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
await self.local[slug].uninstall(remove_config=remove_config)
_LOGGER.info("Add-on '%s' successfully removed", slug)
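
The manager hunk above differs in whether the add-on's Docker image is removed on uninstall: one side always removes it, the other first checks whether any other installed add-on shares the same image and version, and keeps the image in that case. A simplified sketch of that shared-image check, using plain dataclasses and made-up image names instead of the Supervisor models:

from dataclasses import dataclass


@dataclass
class InstalledAddon:
    slug: str
    image: str
    version: str


def should_remove_image(target: InstalledAddon, installed: list[InstalledAddon]) -> bool:
    """Only remove the Docker image if no other installed add-on uses it."""
    shared_image = any(
        addon.image == target.image and addon.version == target.version
        for addon in installed
        if addon.slug != target.slug
    )
    return not shared_image


addons = [
    InstalledAddon("a1", "ghcr.io/example/addon", "1.0"),
    InstalledAddon("a2", "ghcr.io/example/addon", "1.0"),
]
# a2 shares a1's image, so uninstalling a1 keeps the image on disk.
assert should_remove_image(addons[0], addons) is False
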

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Awaitable, Callable
@@ -83,7 +82,6 @@ from ..const import (
SECURITY_DISABLE,
SECURITY_PROFILE,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
)
@@ -150,15 +148,10 @@ class AddonModel(JobGroup, ABC):
"""Return options with local changes."""
return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
return self.data[ATTR_BOOT]
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
return AddonBoot(self.data[ATTR_BOOT])
"""Return boot config with prio local settings."""
return self.data[ATTR_BOOT]
@property
def auto_update(self) -> bool | None:
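
One side of this AddonModel hunk splits the developer-facing boot setting into a separate AddonBootConfig enum with a manual_only value: when the add-on's configuration forces manual boot, the user's persisted preference is ignored. The following standalone sketch shows that precedence rule; the enum string values and the persist dictionary key are illustrative simplifications of the Supervisor's constants.

from enum import StrEnum


class AddonBoot(StrEnum):
    AUTO = "auto"
    MANUAL = "manual"


class AddonBootConfig(StrEnum):
    AUTO = "auto"
    MANUAL = "manual"
    MANUAL_ONLY = "manual_only"


class AddonModel:
    """Base model: boot comes straight from the add-on's configuration."""

    def __init__(self, boot_config: AddonBootConfig) -> None:
        self.boot_config = boot_config

    @property
    def boot(self) -> AddonBoot:
        # Map the config value onto the user-facing enum (manual_only -> manual).
        if self.boot_config == AddonBootConfig.MANUAL_ONLY:
            return AddonBoot.MANUAL
        return AddonBoot(self.boot_config.value)


class Addon(AddonModel):
    """Installed add-on: user settings win unless the config forces manual boot."""

    def __init__(self, boot_config: AddonBootConfig, persist: dict | None = None) -> None:
        super().__init__(boot_config)
        self.persist = persist or {}

    @property
    def boot(self) -> AddonBoot:
        if self.boot_config == AddonBootConfig.MANUAL_ONLY:
            return super().boot
        return self.persist.get("boot", super().boot)


addon = Addon(AddonBootConfig.MANUAL_ONLY, persist={"boot": AddonBoot.AUTO})
assert addon.boot == AddonBoot.MANUAL  # the forced manual setting wins
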

View File

@@ -1,5 +1,4 @@
"""Add-on Options / UI rendering."""
import hashlib
import logging
from pathlib import Path

View File

@@ -1,5 +1,4 @@
"""Util add-ons functions."""
from __future__ import annotations
import asyncio

View File

@@ -1,5 +1,4 @@
"""Validate add-ons options schema."""
import logging
import re
import secrets
@@ -79,8 +78,6 @@ from ..const import (
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
@@ -98,7 +95,6 @@ from ..const import (
ROLE_ALL,
ROLE_DEFAULT,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
AddonState,
@@ -322,9 +318,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup
),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
AddonBootConfig
),
vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -473,8 +467,6 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
},
extra=vol.REMOVE_EXTRA,
)

View File

@@ -1,11 +1,11 @@
"""Init file for Supervisor RESTful API."""
from functools import partial
import logging
from pathlib import Path
from typing import Any
from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
@@ -67,6 +67,7 @@ class RestAPI(CoreSysAttributes):
"max_field_size": MAX_LINE_SIZE,
},
)
attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
@@ -413,7 +414,6 @@ class RestAPI(CoreSysAttributes):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
capture_exception(err)
kwargs.pop("follow", None) # Follow is not supported for Docker logs
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(
@@ -510,7 +510,6 @@ class RestAPI(CoreSysAttributes):
web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post(
"/addons/{addon}/options/validate", api_addons.options_validate
),
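
This hunk touches two small pieces of API wiring: one side attaches aiohttp_fast_url_dispatcher to the web application and registers the /addons/{addon}/sys_options route, and when advanced host logs are unavailable it drops the follow flag before falling back to plain Docker logs, which cannot follow. A tiny sketch of that fallback behaviour, with both log backends reduced to hypothetical async stubs:

import asyncio


async def journal_logs(follow: bool = False) -> str:
    # Stand-in for the systemd-journald backed log endpoint.
    raise RuntimeError("advanced logs not supported on this host")


async def docker_logs(**kwargs) -> str:
    # Stand-in for the Docker log fallback; it has no follow mode.
    assert "follow" not in kwargs, "Docker logs cannot follow"
    return "last Docker log lines"


async def api_logs(**kwargs) -> str:
    """Prefer journald logs, but degrade gracefully to Docker logs."""
    try:
        return await journal_logs(**kwargs)
    except RuntimeError:
        kwargs.pop("follow", None)  # follow is not supported for Docker logs
        return await docker_logs(**kwargs)


print(asyncio.run(api_logs(follow=True)))
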

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -82,8 +81,6 @@ from ..const import (
ATTR_STARTUP,
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS,
ATTR_UART,
ATTR_UDEV,
@@ -98,7 +95,6 @@ from ..const import (
ATTR_WEBUI,
REQUEST_FROM,
AddonBoot,
AddonBootConfig,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
@@ -110,7 +106,7 @@ from ..exceptions import (
PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .const import ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -130,13 +126,6 @@ SCHEMA_OPTIONS = vol.Schema(
}
)
SCHEMA_SYS_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
@@ -189,7 +178,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
}
for addon in self.sys_addons.installed
]
@@ -218,7 +206,6 @@ class APIAddons(CoreSysAttributes):
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui,
@@ -278,8 +265,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
}
return data
@@ -302,10 +287,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -323,20 +304,6 @@ class APIAddons(CoreSysAttributes):
addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Backups RESTful API."""
import asyncio
from collections.abc import Callable
import errno
@@ -343,9 +342,9 @@ class APIBackups(CoreSysAttributes):
_LOGGER.info("Downloading backup %s", backup.slug)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
)
response.headers[
CONTENT_DISPOSITION
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
return response
@api_process

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -17,7 +17,6 @@ ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_CONFIG = "boot_config"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
@@ -37,7 +36,6 @@ ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_FORCE = "force"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor network RESTful API."""
import logging
import voluptuous as vol

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor DNS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor hardware RESTful API."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -35,9 +34,9 @@ from ..const import (
ATTR_WATCHDOG,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE
from .const import ATTR_SAFE_MODE
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,13 +66,6 @@ SCHEMA_UPDATE = vol.Schema(
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
SCHEMA_STOP = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
@@ -81,17 +73,6 @@ SCHEMA_STOP = vol.Schema(
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
async def _check_offline_migration(self, force: bool = False) -> None:
"""Check and raise if there's an offline DB migration in progress."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Offline database migration in progress, try again after it has completed"
)
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return host information."""
@@ -173,7 +154,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request: web.Request) -> None:
"""Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request)
await self._check_offline_migration()
await asyncio.shield(
self.sys_homeassistant.core.update(
@@ -183,12 +163,9 @@ class APIHomeAssistant(CoreSysAttributes):
)
@api_process
async def stop(self, request: web.Request) -> Awaitable[None]:
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop Home Assistant."""
body = await api_validate(SCHEMA_STOP, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_homeassistant.core.stop())
return asyncio.shield(self.sys_homeassistant.core.stop())
@api_process
def start(self, request: web.Request) -> Awaitable[None]:
@@ -199,7 +176,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def restart(self, request: web.Request) -> None:
"""Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
@@ -209,7 +185,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def rebuild(self, request: web.Request) -> None:
"""Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])

View File

@@ -4,7 +4,7 @@ import asyncio
from contextlib import suppress
import logging
from aiohttp import ClientConnectionResetError, web
from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol
from voluptuous.error import CoerceInvalid
@@ -28,7 +28,7 @@ from ..const import (
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
from ..exceptions import APIError, HostLogError
from ..host.const import (
PARAM_BOOT_ID,
PARAM_FOLLOW,
@@ -46,7 +46,6 @@ from .const import (
ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC,
ATTR_FORCE,
ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME,
@@ -61,33 +60,14 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
IDENTIFIER = "identifier"
BOOTID = "bootid"
DEFAULT_LINES = 100
DEFAULT_RANGE = 100
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions."""
async def _check_ha_offline_migration(self, force: bool) -> None:
"""Check if HA has an offline migration in progress and raise if not forced."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
)
@api_process
async def info(self, request):
"""Return host information."""
@@ -129,20 +109,14 @@ class APIHost(CoreSysAttributes):
)
@api_process
async def reboot(self, request):
def reboot(self, request):
"""Reboot host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.reboot())
return asyncio.shield(self.sys_host.control.reboot())
@api_process
async def shutdown(self, request):
def shutdown(self, request):
"""Poweroff host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.shutdown())
return asyncio.shield(self.sys_host.control.shutdown())
@api_process
def reload(self, request):
@@ -222,30 +196,13 @@ class APIHost(CoreSysAttributes):
"supported for now."
)
if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
if request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
log_formatter = LogFormatter.VERBOSE
if "lines" in request.query:
lines = request.query.get("lines", DEFAULT_LINES)
try:
lines = int(lines)
except ValueError:
# If the user passed a non-integer value, just use the default instead of error.
lines = DEFAULT_LINES
finally:
# We can't use the entries= Range header syntax to refer to the last 1 line,
# and passing 1 to the calculation below would return the 1st line of the logs
# instead. Since this is really an edge case that doesn't matter much, we'll just
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
elif RANGE in request.headers:
if RANGE in request.headers:
range_header = request.headers.get(RANGE)
else:
range_header = (
f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
)
range_header = f"entries=:-{DEFAULT_RANGE}:"
async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL
@@ -253,17 +210,9 @@ class APIHost(CoreSysAttributes):
try:
response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT
headers_returned = False
async for cursor, line in journal_logs_reader(resp, log_formatter):
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
await response.prepare(request)
headers_returned = True
# When client closes the connection while reading busy logs, we
# sometimes get this exception. It should be safe to ignore it.
with suppress(ClientConnectionResetError):
await response.write(line.encode("utf-8") + b"\n")
await response.prepare(request)
async for line in journal_logs_reader(resp, log_formatter):
await response.write(line.encode("utf-8") + b"\n")
except ConnectionResetError as ex:
raise APIError(
"Connection reset when trying to fetch data from systemd-journald."

View File

@@ -1,5 +1,4 @@
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Handle security part of this API."""
import logging
import re
from typing import Final
@@ -9,8 +8,6 @@ from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion
from supervisor.homeassistant.const import LANDINGPAGE
from ...addons.const import RE_SLUG
from ...const import (
REQUEST_FROM,
@@ -80,13 +77,6 @@ ADDONS_API_BYPASS: Final = re.compile(
r")$"
)
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_DEFAULT: re.compile(
@@ -242,9 +232,6 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host
if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -290,10 +277,8 @@ class SecurityMiddleware(CoreSysAttributes):
@middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if (
request[REQUEST_FROM] != self.sys_homeassistant
or self.sys_homeassistant.version == LANDINGPAGE
or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
self.sys_homeassistant.version, _CORE_VERSION
):
return await handler(request)

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Multicast RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1,8 +1,8 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any
from aiohttp import web
@@ -48,28 +48,18 @@ from ..host.configuration import (
Interface,
InterfaceMethod,
IpConfig,
IpSetting,
VlanConfig,
WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate
_SCHEMA_IPV4_CONFIG = vol.Schema(
_SCHEMA_IP_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
}
)
_SCHEMA_IPV6_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
}
)
@@ -86,18 +76,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema(
{
vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
vol.Optional(ATTR_ENABLED): vol.Boolean(),
}
)
def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: setting.method,
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
@@ -132,8 +122,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}
@@ -207,26 +197,24 @@ class APINetwork(CoreSysAttributes):
# Apply config
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv4 = replace(
interface.ipv4
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv6 = replace(
interface.ipv6
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_WIFI:
interface.wifi = WifiConfig(
config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
config.get(ATTR_SSID, ""),
config.get(ATTR_AUTH, AuthMethod.OPEN),
config.get(ATTR_PSK, None),
None,
interface.wifi = replace(
interface.wifi
or WifiConfig(
WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
),
**config,
)
elif key == ATTR_ENABLED:
interface.enabled = config
@@ -268,22 +256,24 @@ class APINetwork(CoreSysAttributes):
vlan_config = VlanConfig(vlan, interface.name)
ipv4_setting = None
ipv4_config = None
if ATTR_IPV4 in body:
ipv4_setting = IpSetting(
ipv4_config = IpConfig(
body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
)
ipv6_setting = None
ipv6_config = None
if ATTR_IPV6 in body:
ipv6_setting = IpSetting(
ipv6_config = IpConfig(
body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None),
body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
None,
)
vlan_interface = Interface(
@@ -294,10 +284,8 @@ class APINetwork(CoreSysAttributes):
True,
False,
InterfaceType.VLAN,
None,
ipv4_setting,
None,
ipv6_setting,
ipv4_config,
ipv6_config,
None,
vlan_config,
)
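
The network-API hunk swaps between mutating the applied IpConfig directly and building a separate IpSetting that describes the requested configuration, constructed from the validated request body with static as the default method. A small sketch of building such a setting for IPv4; the dataclass fields mirror the call sites visible in the diff, while the plain-string method and the sample addresses are simplifications.

from dataclasses import dataclass, field
from ipaddress import IPv4Address, IPv4Interface


@dataclass
class IpSetting:
    """Requested IP configuration for one interface (simplified)."""

    method: str
    address: list[IPv4Interface] = field(default_factory=list)
    gateway: IPv4Address | None = None
    nameservers: list[IPv4Address] = field(default_factory=list)


def ipv4_setting_from_body(config: dict) -> IpSetting:
    """Build the setting from a validated API body, defaulting to 'static'."""
    return IpSetting(
        config.get("method", "static"),
        config.get("address", []),
        config.get("gateway"),
        config.get("nameservers", []),
    )


body = {
    "address": [IPv4Interface("192.168.1.50/24")],
    "gateway": IPv4Address("192.168.1.1"),
    "nameservers": [IPv4Address("192.168.1.1")],
}
print(ipv4_setting_from_body(body))
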

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HassOS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12\d|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(1{2}[5-9]|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([2-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[4-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12\d|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12\d|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12\d|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.kEibnOO7vNU.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.QRjNB4gJOsA.js")}else d("/api/hassio/app/frontend_es5/entrypoint.QRjNB4gJOsA.js")}()
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[110],{46875:function(e,n,t){t.d(n,{a:function(){return c}});t(82386);var r=t(9883),a=t(213);function c(e,n){var t=(0,a.m)(e.entity_id),c=void 0!==n?n:null==e?void 0:e.state;if(["button","event","input_button","scene"].includes(t))return c!==r.Hh;if((0,r.g0)(c))return!1;if(c===r.KF&&"alert"!==t)return!1;switch(t){case"alarm_control_panel":return"disarmed"!==c;case"alert":return"idle"!==c;case"cover":case"valve":return"closed"!==c;case"device_tracker":case"person":return"not_home"!==c;case"lawn_mower":return["mowing","error"].includes(c);case"lock":return"locked"!==c;case"media_player":return"standby"!==c;case"vacuum":return!["idle","docked","paused"].includes(c);case"plant":return"problem"===c;case"group":return["on","home","open","locked","problem"].includes(c);case"timer":return"active"===c;case"camera":return"streaming"===c}return!0}},94526:function(e,n,t){t.d(n,{Hg:function(){return r},e0:function(){return a}});t(33994),t(22858),t(88871),t(81027),t(82386),t(97741),t(50693),t(72735),t(26098),t(39790),t(66457),t(55228),t(36604),t(16891),"".concat(location.protocol,"//").concat(location.host);var r=function(e){return e.map((function(e){if("string"!==e.type)return e;switch(e.name){case"username":return Object.assign(Object.assign({},e),{},{autocomplete:"username"});case"password":return Object.assign(Object.assign({},e),{},{autocomplete:"current-password"});case"code":return Object.assign(Object.assign({},e),{},{autocomplete:"one-time-code"});default:return e}}))},a=function(e,n){return e.callWS({type:"auth/sign_path",path:n})}},9883:function(e,n,t){t.d(n,{HV:function(){return c},Hh:function(){return a},KF:function(){return u},ON:function(){return o},g0:function(){return l},s7:function(){return s}});var r=t(99890),a="unavailable",c="unknown",o="on",u="off",s=[a,c],i=[a,c,u],l=(0,r.g)(s);(0,r.g)(i)},54630:function(e,n,t){var r=t(72148);e.exports=/Version\/10(?:\.\d+){1,2}(?: [\w./]+)?(?: Mobile\/\w+)? Safari\//.test(r)},36686:function(e,n,t){var r=t(13113),a=t(93187),c=t(53138),o=t(90924),u=t(22669),s=r(o),i=r("".slice),l=Math.ceil,d=function(e){return function(n,t,r){var o,d,f=c(u(n)),p=a(t),m=f.length,g=void 0===r?" ":c(r);return p<=m||""===g?f:((d=s(g,l((o=p-m)/g.length))).length>o&&(d=i(d,0,o)),e?f+d:d+f)}};e.exports={start:d(!1),end:d(!0)}},79977:function(e,n,t){var r=t(41765),a=t(36686).start;r({target:"String",proto:!0,forced:t(54630)},{padStart:function(e){return a(this,e,arguments.length>1?arguments[1]:void 0)}})}}]);
//# sourceMappingURL=110.N3mhm3V6b1k.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"110.N3mhm3V6b1k.js","mappings":"wMAIO,SAASA,EAAYC,EAAsBC,GAChD,IAAMC,GAASC,EAAAA,EAAAA,GAAcH,EAASI,WAChCC,OAAyBC,IAAVL,EAAsBA,EAAQD,aAAQ,EAARA,EAAUC,MAE7D,GAAI,CAAC,SAAU,QAAS,eAAgB,SAASM,SAASL,GACxD,OAAOG,IAAiBG,EAAAA,GAG1B,IAAIC,EAAAA,EAAAA,IAAmBJ,GACrB,OAAO,EAOT,GAAIA,IAAiBK,EAAAA,IAAkB,UAAXR,EAC1B,OAAO,EAIT,OAAQA,GACN,IAAK,sBACH,MAAwB,aAAjBG,EACT,IAAK,QAEH,MAAwB,SAAjBA,EACT,IAAK,QAaL,IAAK,QACH,MAAwB,WAAjBA,EAZT,IAAK,iBACL,IAAK,SACH,MAAwB,aAAjBA,EACT,IAAK,aACH,MAAO,CAAC,SAAU,SAASE,SAASF,GACtC,IAAK,OACH,MAAwB,WAAjBA,EACT,IAAK,eACH,MAAwB,YAAjBA,EACT,IAAK,SACH,OAAQ,CAAC,OAAQ,SAAU,UAAUE,SAASF,GAGhD,IAAK,QACH,MAAwB,YAAjBA,EACT,IAAK,QACH,MAAO,CAAC,KAAM,OAAQ,OAAQ,SAAU,WAAWE,SAASF,GAC9D,IAAK,QACH,MAAwB,WAAjBA,EACT,IAAK,SACH,MAAwB,cAAjBA,EAGX,OAAO,CACT,C,+MChCuB,GAAHM,OAAMC,SAASC,SAAQ,MAAAF,OAAKC,SAASE,M,IAE5CC,EAA0B,SAACC,GAAsB,OAC5DA,EAAOC,KAAI,SAACC,GACV,GAAmB,WAAfA,EAAMC,KAAmB,OAAOD,EACpC,OAAQA,EAAME,MACZ,IAAK,WACH,OAAAC,OAAAC,OAAAD,OAAAC,OAAA,GAAYJ,GAAK,IAAEK,aAAc,aACnC,IAAK,WACH,OAAAF,OAAAC,OAAAD,OAAAC,OAAA,GAAYJ,GAAK,IAAEK,aAAc,qBACnC,IAAK,OACH,OAAAF,OAAAC,OAAAD,OAAAC,OAAA,GAAYJ,GAAK,IAAEK,aAAc,kBACnC,QACE,OAAOL,EAEb,GAAE,EAESM,EAAgB,SAC3BC,EACAC,GAAY,OACYD,EAAKE,OAAO,CAAER,KAAM,iBAAkBO,KAAAA,GAAO,C,+LC3C1DlB,EAAc,cACdoB,EAAU,UACVC,EAAK,KACLnB,EAAM,MAENoB,EAAqB,CAACtB,EAAaoB,GACnCG,EAAa,CAACvB,EAAaoB,EAASlB,GAEpCD,GAAqBuB,EAAAA,EAAAA,GAAqBF,IAC7BE,EAAAA,EAAAA,GAAqBD,E,wBCR/C,IAAIE,EAAY,EAAQ,OACxBC,EAAOC,QAAU,mEAAmEC,KAAKH,E,wBCDzF,IAAII,EAAc,EAAQ,OACtBC,EAAW,EAAQ,OACnBC,EAAW,EAAQ,OACnBC,EAAU,EAAQ,OAClBC,EAAyB,EAAQ,OACjCC,EAASL,EAAYG,GACrBG,EAAcN,EAAY,GAAGO,OAC7BC,EAAOC,KAAKD,KAGZE,EAAe,SAAUC,GAC3B,OAAO,SAAUC,EAAOC,EAAWC,GACjC,IAIIC,EAASC,EAJTC,EAAIf,EAASE,EAAuBQ,IACpCM,EAAejB,EAASY,GACxBM,EAAeF,EAAEG,OACjBC,OAAyBpD,IAAf6C,EAA2B,IAAMZ,EAASY,GAExD,OAAII,GAAgBC,GAA4B,KAAZE,EAAuBJ,IAE3DD,EAAeX,EAAOgB,EAASb,GAD/BO,EAAUG,EAAeC,GACqBE,EAAQD,UACrCA,OAASL,IAASC,EAAeV,EAAYU,EAAc,EAAGD,IACxEJ,EAASM,EAAID,EAAeA,EAAeC,EACpD,CACF,EACApB,EAAOC,QAAU,CAGfwB,MAAOZ,GAAa,GAGpBa,IAAKb,GAAa,G,wBC/BpB,IAAIc,EAAI,EAAQ,OACZC,EAAY,eAKhBD,EAAE,CACAE,OAAQ,SACRC,OAAO,EACPC,OAPe,EAAQ,QAQtB,CACDC,SAAU,SAAkBhB,GAC1B,OAAOY,EAAUK,KAAMjB,EAAWkB,UAAUX,OAAS,EAAIW,UAAU,QAAK9D,EAC1E,G","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20241105.0/src/common/entity/state_active.ts","https://raw.githubusercontent.com/home-assistant/frontend/20241105.0/src/data/auth.ts","https://raw.githubusercontent.com/home-assistant/frontend/20241105.0/src/data/entity.ts","/unknown/node_modules/core-js/internals/string-pad-webkit-bug.js","/unknown/node_modules/core-js/internals/string-pad.js","/unknown/node_modules/core-js/modules/es.string.pad-start.js"],"names":["stateActive","stateObj","state","domain","computeDomain","entity_id","compareState","undefined","includes","UNAVAILABLE","isUnavailableState","OFF","concat","location","protocol","host","autocompleteLoginFields","schema","map","field","type","name","Object","assign","autocomplete","getSignedPath","hass","path","callWS","UNKNOWN","ON","UNAVAILABLE_STATES","OFF_STATES","arrayLiteralIncludes","userAgent","module","exports","test","uncurryThis","toLength","toString","$repeat","requireObjectCoercible","repeat","stringSlice","slice","ceil","Math","createMethod","IS_END","$this","maxLength","fillString","fillLen","stringFiller","S","intMaxLength","stringLength","length","fillStr","start","end","$","$padStart","target","proto","forced","padStart","this","arguments"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +0,0 @@
/**
* @license
* Copyright 2024 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1306],{21306:function(e,t,r){r.r(t),r.d(t,{HaFormGrid:function(){return v}});var a,i,o,n=r(64599),d=r(33994),u=r(22858),s=r(35806),c=r(71008),l=r(62193),h=r(2816),m=r(27927),f=r(35890),p=(r(81027),r(97741),r(16891),r(36185),r(50289)),k=r(29818),v=(0,m.A)([(0,k.EM)("ha-form-grid")],(function(e,t){var r,m=function(t){function r(){var t;(0,c.A)(this,r);for(var a=arguments.length,i=new Array(a),o=0;o<a;o++)i[o]=arguments[o];return t=(0,l.A)(this,r,[].concat(i)),e(t),t}return(0,h.A)(r,t),(0,s.A)(r)}(t);return{F:m,d:[{kind:"field",decorators:[(0,k.MZ)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,k.MZ)({attribute:!1})],key:"data",value:void 0},{kind:"field",decorators:[(0,k.MZ)({attribute:!1})],key:"schema",value:void 0},{kind:"field",decorators:[(0,k.MZ)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,k.MZ)({attribute:!1})],key:"computeLabel",value:void 0},{kind:"field",decorators:[(0,k.MZ)({attribute:!1})],key:"computeHelper",value:void 0},{kind:"field",decorators:[(0,k.MZ)({attribute:!1})],key:"localizeValue",value:void 0},{kind:"method",key:"focus",value:(r=(0,u.A)((0,d.A)().mark((function e(){var t;return(0,d.A)().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return e.next=2,this.updateComplete;case 2:null===(t=this.renderRoot.querySelector("ha-form"))||void 0===t||t.focus();case 3:case"end":return e.stop()}}),e,this)}))),function(){return r.apply(this,arguments)})},{kind:"method",key:"updated",value:function(e){(0,f.A)(m,"updated",this,3)([e]),e.has("schema")&&(this.schema.column_min_width?this.style.setProperty("--form-grid-min-width",this.schema.column_min_width):this.style.setProperty("--form-grid-min-width",""))}},{kind:"method",key:"render",value:function(){var e=this;return(0,p.qy)(a||(a=(0,n.A)([" "," "])),this.schema.schema.map((function(t){return(0,p.qy)(i||(i=(0,n.A)([' <ha-form .hass="','" .data="','" .schema="','" .disabled="','" .computeLabel="','" .computeHelper="','" .localizeValue="','"></ha-form> '])),e.hass,e.data,[t],e.disabled,e.computeLabel,e.computeHelper,e.localizeValue)})))}},{kind:"get",static:!0,key:"styles",value:function(){return(0,p.AH)(o||(o=(0,n.A)([":host{display:grid!important;grid-template-columns:repeat(var(--form-grid-column-count,auto-fit),minmax(var(--form-grid-min-width,200px),1fr));grid-column-gap:8px;grid-row-gap:24px}:host>ha-form{display:block}"])))}}]}}),p.WF)}}]);
//# sourceMappingURL=1306.EM6Ds30Fu3A.js.map
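
As a readability aid, the following is a condensed sketch of the ha-form-grid element bundled in chunk 1306 above, reconstructed from the minified source and its source map (src/components/ha-form/ha-form-grid.ts). It is illustrative only; property types are loosened and the Lit imports are assumed to match the frontend's setup.

// Condensed, illustrative sketch of ha-form-grid from chunk 1306 above (not the shipped source).
import { css, html, LitElement, type PropertyValues } from "lit";
import { customElement, property } from "lit/decorators.js";

@customElement("ha-form-grid")
class HaFormGrid extends LitElement {
  @property({ attribute: false }) public hass!: unknown;
  @property({ attribute: false }) public data!: Record<string, unknown>;
  @property({ attribute: false }) public schema!: { schema: unknown[]; column_min_width?: string };
  @property({ type: Boolean }) public disabled = false;
  @property({ attribute: false }) public computeLabel?: Function;
  @property({ attribute: false }) public computeHelper?: Function;
  @property({ attribute: false }) public localizeValue?: Function;

  public async focus() {
    // Wait for the next render, then forward focus to the first nested form.
    await this.updateComplete;
    this.renderRoot.querySelector<HTMLElement>("ha-form")?.focus();
  }

  protected updated(changedProps: PropertyValues) {
    super.updated(changedProps);
    // Let the schema override the minimum column width of the grid.
    if (changedProps.has("schema")) {
      this.style.setProperty("--form-grid-min-width", this.schema.column_min_width ?? "");
    }
  }

  protected render() {
    // Render one nested ha-form per schema entry; the host lays them out as a CSS grid.
    return html`${this.schema.schema.map(
      (item) => html`<ha-form
        .hass=${this.hass}
        .data=${this.data}
        .schema=${[item]}
        .disabled=${this.disabled}
        .computeLabel=${this.computeLabel}
        .computeHelper=${this.computeHelper}
        .localizeValue=${this.localizeValue}
      ></ha-form>`
    )}`;
  }

  static styles = css`
    :host {
      display: grid !important;
      grid-template-columns: repeat(
        var(--form-grid-column-count, auto-fit),
        minmax(var(--form-grid-min-width, 200px), 1fr)
      );
      grid-column-gap: 8px;
      grid-row-gap: 24px;
    }
    :host > ha-form {
      display: block;
    }
  `;
}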

View File

@@ -1 +0,0 @@
{"version":3,"file":"1306.EM6Ds30Fu3A.js","mappings":"iWAaaA,GAAUC,EAAAA,EAAAA,GAAA,EADtBC,EAAAA,EAAAA,IAAc,kBAAe,SAAAC,EAAAC,GAAA,IAmBGC,EAlBpBL,EAAU,SAAAM,GAAA,SAAAN,IAAA,IAAAO,GAAAC,EAAAA,EAAAA,GAAA,KAAAR,GAAA,QAAAS,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,GAAAQ,EAAAA,EAAAA,GAAA,KAAAf,EAAA,GAAAgB,OAAAJ,IAAAT,EAAAI,GAAAA,CAAA,QAAAU,EAAAA,EAAAA,GAAAjB,EAAAM,IAAAY,EAAAA,EAAAA,GAAAlB,EAAA,EAAAI,GAAA,OAAAe,EAAVnB,EAAUoB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACpBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,eAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAK9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAI9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,QAAAC,OAAArB,GAAAwB,EAAAA,EAAAA,IAAAC,EAAAA,EAAAA,KAAAC,MAI/B,SAAAC,IAAA,IAAAC,EAAA,OAAAH,EAAAA,EAAAA,KAAAI,MAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAE,KAAA,EACQC,KAAKC,eAAc,OACe,QAAxCN,EAAAK,KAAKE,WAAWC,cAAc,kBAAU,IAAAR,GAAxCA,EAA0CS,QAAQ,wBAAAP,EAAAQ,OAAA,GAAAX,EAAA,UACnD,WAHiB,OAAA3B,EAAAuC,MAAC,KAADlC,UAAA,KAAAW,KAAA,SAAAI,IAAA,UAAAC,MAKlB,SAAkBmB,IAChBC,EAAAA,EAAAA,GA5BS9C,EAAU,iBA4BnB8C,CA5BmB,CA4BLD,IACVA,EAAaE,IAAI,YACfT,KAAKU,OAAOC,iBACdX,KAAKY,MAAMC,YACT,wBACAb,KAAKU,OAAOC,kBAGdX,KAAKY,MAAMC,YAAY,wBAAyB,IAGtD,GAAC,CAAA9B,KAAA,SAAAI,IAAA,SAAAC,MAED,WAAmC,IAAA0B,EAAA,KACjC,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,YACPjB,KAAKU,OAAOA,OAAOQ,KACnB,SAACC,GAAI,OAAKJ,EAAAA,EAAAA,IAAIK,IAAAA,GAAAH,EAAAA,EAAAA,GAAA,gJAEFH,EAAKO,KACLP,EAAKQ,KACH,CAACH,GACCL,EAAKS,SACDT,EAAKU,aACJV,EAAKW,cACLX,EAAKY,cAAa,IAK7C,GAAC,CAAA3C,KAAA,MAAA4C,QAAA,EAAAxC,IAAA,SAAAC,MAED,WACE,OAAOwC,EAAAA,EAAAA,IAAGC,IAAAA,GAAAZ,EAAAA,EAAAA,GAAA,wNAcZ,IAAC,GA1E6Ba,EAAAA,G","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20241105.0/src/components/ha-form/ha-form-grid.ts"],"names":["HaFormGrid","_decorate","customElement","_initialize","_LitElement","_focus","_LitElement2","_this","_classCallCheck","_len","arguments","length","args","Array","_key","_callSuper","concat","_inherits","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_asyncToGenerator","_regeneratorRuntime","mark","_callee","_this$renderRoot$quer","wrap","_context","prev","next","this","updateComplete","renderRoot","querySelector","focus","stop","apply","changedProps","_superPropGet","has","schema","column_min_width","style","setProperty","_this2","html","_templateObject","_taggedTemplateLiteral","map","item","_templateObject2","hass","data","disabled","computeLabel","computeHelper","localizeValue","static","css","_templateObject3","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +0,0 @@
/**
* @license
* Copyright 2021 Google LLC
* SPDX-License-Identifier: BSD-3-Clause
*/

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
!function(){"use strict";var n,t,e={14595:function(n,t,e){e(58556);var r,i,o=e(93217),u=e(422),a=e(62173),s=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},c={renderMarkdown:function(n,t){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign(Object.assign({},(0,a.getDefaultWhiteList)()),{},{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign(Object.assign({},r),{},{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(n,t),{whiteList:e,onTagAttr:s})}};(0,o.Jj)(c)}},r={};function i(n){var t=r[n];if(void 0!==t)return t.exports;var o=r[n]={exports:{}};return e[n](o,o.exports,i),o.exports}i.m=e,i.x=function(){var n=i.O(void 0,[9191,215],(function(){return i(14595)}));return n=i.O(n)},n=[],i.O=function(t,e,r,o){if(!e){var u=1/0;for(f=0;f<n.length;f++){e=n[f][0],r=n[f][1],o=n[f][2];for(var a=!0,s=0;s<e.length;s++)(!1&o||u>=o)&&Object.keys(i.O).every((function(n){return i.O[n](e[s])}))?e.splice(s--,1):(a=!1,o<u&&(u=o));if(a){n.splice(f--,1);var c=r();void 0!==c&&(t=c)}}return t}o=o||0;for(var f=n.length;f>0&&n[f-1][2]>o;f--)n[f]=n[f-1];n[f]=[e,r,o]},i.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return i.d(t,{a:t}),t},i.d=function(n,t){for(var e in t)i.o(t,e)&&!i.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},i.f={},i.e=function(n){return Promise.all(Object.keys(i.f).reduce((function(t,e){return i.f[e](n,t),t}),[]))},i.u=function(n){return n+"-"+{215:"FPZmDYZTPdk",9191:"37260H-osZ4"}[n]+".js"},i.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},i.p="/api/hassio/app/frontend_es5/",function(){var n={1402:1};i.f.i=function(t,e){n[t]||importScripts(i.p+i.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],o=t[1],u=t[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)n[r.pop()]=1;e(t)}}(),t=i.x,i.x=function(){return Promise.all([i.e(9191),i.e(215)]).then(t)};i.x()}();
//# sourceMappingURL=1402-6WKUruvoXtM.js.map
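
The chunk added above is the markdown web worker. As a rough guide to what it does, here is an illustrative sketch of the exposed renderMarkdown API, reconstructed from the minified source; the comlink expose(), marked(), and xss package imports are assumptions based on how the bundled helpers behave, not confirmed from the build config.

// Illustrative sketch of the markdown worker in chunk 1402 above (not the shipped source).
// Assumptions: the bundled helpers correspond to comlink's expose(), marked(), and the xss package.
import { expose } from "comlink";
import { marked } from "marked";
import { filterXSS, getDefaultWhiteList } from "xss";

let whiteList: ReturnType<typeof getDefaultWhiteList> | undefined;
let whiteListSvg: ReturnType<typeof getDefaultWhiteList> | undefined;

// For <input> tags, only checkbox-related attributes survive sanitization
// (these come from markdown task lists); every other attribute is stripped.
const onTagAttr = (tag: string, name: string, value: string): string | undefined => {
  if (tag === "input") {
    if ((name === "type" && value === "checkbox") || name === "checked" || name === "disabled") {
      return undefined; // keep the attribute, default handling
    }
    return ""; // drop the attribute
  }
  return undefined;
};

const api = {
  renderMarkdown(
    content: string,
    markedOptions: Record<string, unknown>,
    hassOptions: { allowSvg?: boolean } = {}
  ): string {
    if (!whiteList) {
      whiteList = {
        ...getDefaultWhiteList(),
        input: ["type", "disabled", "checked"],
        "ha-icon": ["icon"],
        "ha-svg-icon": ["path"],
        "ha-alert": ["alert-type", "title"],
      };
    }
    let list = whiteList;
    if (hassOptions.allowSvg) {
      whiteListSvg ??= {
        ...whiteList,
        svg: ["xmlns", "height", "width"],
        path: ["transform", "stroke", "d"],
        img: ["src"],
      };
      list = whiteListSvg;
    }
    // Render the markdown off the main thread, then sanitize the HTML with the whitelist above.
    return filterXSS(marked(content, markedOptions) as string, { whiteList: list, onTagAttr });
  },
};

expose(api);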

Some files were not shown because too many files have changed in this diff.