Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-19 22:19:21 +00:00)
Compare commits (75 commits; author and date columns were not captured):

164c403d05
5e8007453f
0a0d97b084
eb604ed92d
c47828dbaa
ea437dc745
c16a208b39
55d803b2a0
611f6f2829
b94df76731
218619e7f0
273eed901a
8ea712a937
658449a7a0
968c471591
b4665f3907
496cee1ec4
0f8c80f3ba
6c28f82239
def32abb57
f57a241b9e
11a7e8b15d
fa4f7697b7
6098b7de8e
0a382ce54d
dd53aaa30c
31e175a15a
4c80727bcc
b2c3157361
dc4f38ebd0
7c9437c6ee
9ce9e10dfd
4e94043bca
749d45bf13
ce99b3e259
2c84daefab
dc1933fa88
6970cebf80
a234006de2
2484149323
778148424c
55f4a2395e
5a45d47ed8
da601d1483
e98a1272e9
90e9cf788b
ec387c3010
7e5a960c98
f1bcbf2416
bce144e197
86a3735d83
decf254e5f
e10fe16f21
996891a740
7385d026ea
09f43d6f3c
6906e757dd
963d242afa
3ed7cbe2ed
0da924f10b
76411da0a7
ce87a72cf0
f8c9e2f295
00af027e51
c91fce3281
fb6df18ce9
31f5c6f938
d3a44b2992
b537a03e6d
46093379e4
1b17d90504
7d42dd7ac2
f35dcfcfd3
c4f223c38a
71362f2c76
.github/stale.yml (vendored, new file, 17 lines)

@@ -0,0 +1,17 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
.hadolint.yaml (new file, 5 lines)

@@ -0,0 +1,5 @@
ignored:
  - DL3018
  - DL3006
  - DL3013
  - SC2155
API.md (27 lines changed)

@@ -44,6 +44,8 @@ The addons from `addons` are only installed one.
    "logging": "debug|info|warning|error|critical",
    "ip_address": "ip address",
    "wait_boot": "int",
    "debug": "bool",
    "debug_block": "bool",
    "addons": [
        {
            "name": "xy bla",
@@ -80,6 +82,8 @@ Optional:
    "channel": "stable|beta|dev",
    "timezone": "TIMEZONE",
    "wait_boot": "int",
    "debug": "bool",
    "debug_block": "bool",
    "logging": "debug|info|warning|error|critical",
    "addons_repositories": [
        "REPO_URL"
@@ -516,7 +520,8 @@ Get all available addons.
        "ingress": "bool",
        "ingress_entry": "null|/api/hassio_ingress/slug",
        "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
        "ingress_port": "null|int"
        "ingress_port": "null|int",
        "ingress_panel": "null|bool"
    }
```

@@ -537,7 +542,8 @@ Get all available addons.
    },
    "options": {},
    "audio_output": "null|0,0",
    "audio_input": "null|0,0"
    "audio_input": "null|0,0",
    "ingress_panel": "bool"
}
```

@@ -602,6 +608,23 @@ Create a new Session for access to ingress service.
}
```

- GET `/ingress/panels`

Return a list of enabled panels.

```json
{
    "panels": {
        "addon_slug": {
            "enable": "boolean",
            "icon": "mdi:...",
            "title": "title",
            "admin": "boolean"
        }
    }
}
```

- VIEW `/ingress/{token}`

Ingress WebUI for this Add-on. The addon need support HASS Auth!
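To see what the new panels endpoint returns in practice, here is a minimal Python sketch (not part of the diff): the host, the token value, the `X-HASSIO-KEY` auth header, and the `result`/`data` response envelope are all assumptions about the surrounding Hass.io API conventions rather than something this diff specifies.

```python
# Hedged sketch: list the enabled ingress panels via the new endpoint.
import requests

SUPERVISOR = "http://hassio.local"  # assumption: where the Supervisor API is reachable
TOKEN = "<hassio-api-token>"        # assumption: a valid Hass.io API token

resp = requests.get(
    f"{SUPERVISOR}/ingress/panels",
    headers={"X-HASSIO-KEY": TOKEN},  # assumption: usual Hass.io auth header
    timeout=10,
)
resp.raise_for_status()

# Panels are keyed by add-on slug, matching the JSON layout shown above.
for slug, panel in resp.json()["data"]["panels"].items():
    print(slug, panel["enable"], panel["title"], panel["admin"])
```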
Dockerfile (17 lines changed)

@@ -1,6 +1,8 @@
ARG BUILD_FROM
FROM $BUILD_FROM

ARG BUILD_ARCH

# Install base
RUN apk add --no-cache \
    openssl \
@@ -14,20 +16,13 @@ RUN apk add --no-cache \

# Install requirements
COPY requirements.txt /usr/src/
RUN apk add --no-cache --virtual .build-dependencies \
    make \
    g++ \
    openssl-dev \
    libffi-dev \
    musl-dev \
    && export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
    && apk del .build-dependencies \
RUN export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir --find-links https://wheels.hass.io/alpine-3.9/${BUILD_ARCH}/ \
    -r /usr/src/requirements.txt \
    && rm -f /usr/src/requirements.txt

# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir /usr/src/hassio \
    && rm -rf /usr/src/hassio
RUN pip3 install --no-cache-dir -e /usr/src/hassio

CMD [ "python3", "-m", "hassio" ]
README.md

@@ -1,3 +1,5 @@
[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)

# Hass.io

## First private cloud solution for home automation
@@ -18,9 +20,9 @@ Installation instructions can be found at <https://home-assistant.io/hassio>.

The development of the supervisor is a bit tricky. Not difficult but tricky.

- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
- Go into a HassOS device or VM and pull your supervisor.
- Set the developer modus on updater.json
- Set the developer modus with cli `hassio supervisor options --channel=dev`
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Test your changes
azure-pipelines.yml

@@ -1,45 +1,196 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
# https://dev.azure.com/home-assistant

trigger:
- master
- dev

  batch: true
  branches:
    include:
    - master
    - dev
  tags:
    include:
    - '*'
    exclude:
    - untagged*
pr:
- dev
- dev
variables:
  - name: versionHadolint
    value: 'v1.16.3'
  - name: versionBuilder
    value: '3.2'
  - name: versionWheels
    value: '0.6'
  - group: docker
  - group: wheels


jobs:

- job: "Tox"

- job: 'Tox'
  pool:
    vmImage: 'ubuntu-16.04'
    vmImage: 'ubuntu-latest'
  steps:
  - task: UsePythonVersion@0
    displayName: 'Use Python 3.7'
    inputs:
      versionSpec: '3.7'
  - script: pip install tox
    displayName: 'Install Tox'
  - script: tox
    displayName: 'Run Tox'


- job: 'Black'
  pool:
    vmImage: 'ubuntu-latest'
  steps:
  - task: UsePythonVersion@0
    displayName: 'Use Python $(python.version)'
    inputs:
      versionSpec: '3.7'

  - script: pip install tox
    displayName: 'Install Tox'

  - script: tox
    displayName: 'Run Tox'
  - script: pip install black
    displayName: 'Install black'
  - script: black --check hassio tests
    displayName: 'Run Black'


- job: "JQ"

- job: 'JQ'
  pool:
    vmImage: 'ubuntu-16.04'

    vmImage: 'ubuntu-latest'
  steps:
  - script: sudo apt-get install -y jq
    displayName: 'Install JQ'

  - bash: |
      shopt -s globstar
      cat **/*.json | jq '.'
    displayName: 'Run JQ'


- job: 'Hadolint'
  pool:
    vmImage: 'ubuntu-latest'
  steps:
  - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
    displayName: 'Install Hadolint'
  - script: |
      sudo docker run --rm -i \
        -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
        hadolint/hadolint:$(versionHadolint) < Dockerfile
    displayName: 'Run Hadolint'


- job: 'Wheels'
  condition: eq(variables['Build.SourceBranchName'], 'dev')
  timeoutInMinutes: 360
  pool:
    vmImage: 'ubuntu-latest'
  strategy:
    maxParallel: 3
    matrix:
      amd64:
        buildArch: 'amd64'
      i386:
        buildArch: 'i386'
      armhf:
        buildArch: 'armhf'
      armv7:
        buildArch: 'armv7'
      aarch64:
        buildArch: 'aarch64'
  steps:
  - script: |
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends \
        qemu-user-static \
        binfmt-support \
        curl

      sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
      sudo update-binfmts --enable qemu-arm
      sudo update-binfmts --enable qemu-aarch64
    displayName: 'Initial cross build'
  - script: |
      mkdir -p .ssh
      echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
      ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
      chmod 600 .ssh/*
    displayName: 'Install ssh key'
  - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
    displayName: 'Install wheels builder'
  - script: |
      curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/hassio/master/requirements.txt
      sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
        homeassistant/$(buildArch)-wheels:$(versionWheels) \
        --apk "build-base;libffi-dev;openssl-dev" \
        --index $(wheelsIndex) \
        --requirement requirements.txt \
        --requirement-diff requirements_diff.txt \
        --upload rsync \
        --remote wheels@$(wheelsHost):/opt/wheels
    displayName: 'Run wheels build'


- job: 'ReleaseDEV'
  condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('Wheels'))
  dependsOn:
  - 'JQ'
  - 'Tox'
  - 'Hadolint'
  - 'Wheels'
  pool:
    vmImage: 'ubuntu-latest'
  steps:
  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
    displayName: 'Docker hub login'
  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
    displayName: 'Install Builder'
  - script: |
      sudo docker run --rm --privileged \
        -v ~/.docker:/root/.docker \
        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
        homeassistant/amd64-builder:$(versionBuilder) \
        --supervisor --all -t /data --version dev --docker-hub homeassistant
    displayName: 'Build DEV'


- job: 'VersionValidate'
  condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
  pool:
    vmImage: 'ubuntu-latest'
  steps:
  - task: UsePythonVersion@0
    displayName: 'Use Python 3.7'
    inputs:
      versionSpec: '3.7'
  - script: |
      setup_version="$(python setup.py -V)"
      branch_version="$(Build.SourceBranchName)"

      if [ "${setup_version}" != "${branch_version}" ]; then
        echo "Version of tag ${branch_version} don't match with ${setup_version}!"
        exit 1
      fi
    displayName: 'Check version of branch/tag'


- job: 'Release'
  condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('VersionValidate'))
  dependsOn:
  - 'JQ'
  - 'Tox'
  - 'Hadolint'
  - 'VersionValidate'
  pool:
    vmImage: 'ubuntu-latest'
  steps:
  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
    displayName: 'Docker hub login'
  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
    displayName: 'Install Builder'
  - script: |
      sudo docker run --rm --privileged \
        -v ~/.docker:/root/.docker \
        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
        homeassistant/amd64-builder:$(versionBuilder) \
        --supervisor --all -t /data --docker-hub homeassistant
    displayName: 'Build Release'
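The `VersionValidate` job guards tag releases: the pushed git tag must equal the version that `setup.py` reports. For clarity, a hedged Python equivalent of that shell step (the script name and invocation are illustrative, not part of the pipeline):

```python
# Hedged re-implementation of the VersionValidate check above.
import subprocess
import sys

def validate_tag(tag: str) -> None:
    """Exit non-zero when setup.py's version differs from the git tag."""
    setup_version = subprocess.run(
        [sys.executable, "setup.py", "-V"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    if setup_version != tag:
        raise SystemExit(f"Version of tag {tag} doesn't match {setup_version}!")

if __name__ == "__main__":
    validate_tag(sys.argv[1])  # e.g. python validate_tag.py 164
```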
hassio/__main__.py

@@ -40,6 +40,7 @@ if __name__ == "__main__":
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())

    bootstrap.migrate_system_env(coresys)
    bootstrap.supervisor_debugger(coresys)

    _LOGGER.info("Setup HassIO")
    loop.run_until_complete(coresys.core.setup())
hassio/addons/__init__.py

@@ -1,158 +1,248 @@
"""Init file for Hass.io add-ons."""
import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Dict, List, Optional, Union

from ..const import BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    AddonsError,
    AddonsNotSupportedError,
    DockerAPIError,
    HostAppArmorError,
)
from ..store.addon import AddonStore
from .addon import Addon
from .repository import Repository
from .data import AddonsData
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
AnyAddon = Union[Addon, AddonStore]


class AddonManager(CoreSysAttributes):
    """Manage add-ons inside Hass.io."""

    def __init__(self, coresys):
    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys = coresys
        self.data = AddonsData(coresys)
        self.addons_obj = {}
        self.repositories_obj = {}
        self.coresys: CoreSys = coresys
        self.data: AddonsData = AddonsData(coresys)
        self.local: Dict[str, Addon] = {}
        self.store: Dict[str, AddonStore] = {}

    @property
    def list_addons(self):
    def all(self) -> List[AnyAddon]:
        """Return a list of all add-ons."""
        return list(self.addons_obj.values())
        addons = {**self.store, **self.local}
        return list(addons.values())

    @property
    def list_installed(self):
        """Return a list of installed add-ons."""
        return [addon for addon in self.addons_obj.values()
                if addon.is_installed]
    def installed(self) -> List[Addon]:
        """Return a list of all installed add-ons."""
        return list(self.local.values())

    @property
    def list_repositories(self):
        """Return list of add-on repositories."""
        return list(self.repositories_obj.values())
    def get(self, addon_slug: str) -> Optional[AnyAddon]:
        """Return an add-on from slug.

    def get(self, addon_slug):
        """Return an add-on from slug."""
        return self.addons_obj.get(addon_slug)
        Prio:
        1 - Local
        2 - Store
        """
        if addon_slug in self.local:
            return self.local[addon_slug]
        return self.store.get(addon_slug)

    def from_token(self, token):
    def from_token(self, token: str) -> Optional[Addon]:
        """Return an add-on from Hass.io token."""
        for addon in self.list_addons:
            if addon.is_installed and token == addon.hassio_token:
        for addon in self.installed:
            if token == addon.hassio_token:
                return addon
        return None

    async def load(self):
    async def load(self) -> None:
        """Start up add-on management."""
        self.data.reload()

        # Init Hass.io built-in repositories
        repositories = \
            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES

        # Init custom repositories and load add-ons
        await self.load_repositories(repositories)

    async def reload(self):
        """Update add-ons from repository and reload list."""
        tasks = [repository.update() for repository in
                 self.repositories_obj.values()]
        if tasks:
            await asyncio.wait(tasks)

        # read data from repositories
        self.data.reload()

        # update addons
        await self.load_addons()

    async def load_repositories(self, list_repositories):
        """Add a new custom repository."""
        new_rep = set(list_repositories)
        old_rep = set(self.repositories_obj)

        # add new repository
        async def _add_repository(url):
            """Helper function to async add repository."""
            repository = Repository(self.coresys, url)
            if not await repository.load():
                _LOGGER.error("Can't load from repository %s", url)
                return
            self.repositories_obj[url] = repository

            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
                self.sys_config.add_addon_repository(url)

        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
            await asyncio.wait(tasks)

        # del new repository
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            self.repositories_obj.pop(url).remove()
            self.sys_config.drop_addon_repository(url)

        # update data
        self.data.reload()
        await self.load_addons()

    async def load_addons(self):
        """Update/add internal add-on store."""
        all_addons = set(self.data.system) | set(self.data.cache)

        # calc diff
        add_addons = all_addons - set(self.addons_obj)
        del_addons = set(self.addons_obj) - all_addons

        _LOGGER.info("Load add-ons: %d all - %d new - %d remove",
                     len(all_addons), len(add_addons), len(del_addons))

        # new addons
        tasks = []
        for addon_slug in add_addons:
            addon = Addon(self.coresys, addon_slug)

        for slug in self.data.system:
            addon = self.local[slug] = Addon(self.coresys, slug)
            tasks.append(addon.load())
            self.addons_obj[addon_slug] = addon

        # Run initial tasks
        _LOGGER.info("Found %d installed add-ons", len(tasks))
        if tasks:
            await asyncio.wait(tasks)

        # remove
        for addon_slug in del_addons:
            self.addons_obj.pop(addon_slug)

    async def boot(self, stage):
    async def boot(self, stage: str) -> None:
        """Boot add-ons with mode auto."""
        tasks = []
        for addon in self.addons_obj.values():
            if addon.is_installed and addon.boot == BOOT_AUTO and \
                    addon.startup == stage:
                tasks.append(addon.start())
        for addon in self.installed:
            if addon.boot != BOOT_AUTO or addon.startup != stage:
                continue
            tasks.append(addon.start())

        _LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
        _LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)
            await asyncio.sleep(self.sys_config.wait_boot)

    async def shutdown(self, stage):
    async def shutdown(self, stage: str) -> None:
        """Shutdown addons."""
        tasks = []
        for addon in self.addons_obj.values():
            if addon.is_installed and \
                    await addon.state() == STATE_STARTED and \
                    addon.startup == stage:
                tasks.append(addon.stop())
        for addon in self.installed:
            if await addon.state() != STATE_STARTED or addon.startup != stage:
                continue
            tasks.append(addon.stop())

        _LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
        _LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)

    async def install(self, slug: str) -> None:
        """Install an add-on."""
        if slug in self.local:
            _LOGGER.warning("Add-on %s is already installed", slug)
            return
        store = self.store.get(slug)

        if not store:
            _LOGGER.error("Add-on %s not exists", slug)
            raise AddonsError()

        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()

        self.data.install(store)
        addon = Addon(self.coresys, slug)

        if not addon.path_data.is_dir():
            _LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
            addon.path_data.mkdir()

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        try:
            await addon.instance.install(store.version, store.image)
        except DockerAPIError:
            self.data.uninstall(addon)
            raise AddonsError() from None
        else:
            self.local[slug] = addon

    async def uninstall(self, slug: str) -> None:
        """Remove an add-on."""
        if slug not in self.local:
            _LOGGER.warning("Add-on %s is not installed", slug)
            return
        addon = self.local.get(slug)

        try:
            await addon.instance.remove()
        except DockerAPIError:
            raise AddonsError() from None

        await addon.remove_data()

        # Cleanup audio settings
        if addon.path_asound.exists():
            with suppress(OSError):
                addon.path_asound.unlink()

        # Cleanup AppArmor profile
        with suppress(HostAppArmorError):
            await addon.uninstall_apparmor()

        # Cleanup internal data
        addon.remove_discovery()
        self.data.uninstall(addon)
        self.local.pop(slug)

    async def update(self, slug: str) -> None:
        """Update add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)

        if addon.version == store.version:
            _LOGGER.warning("No update available for add-on %s", slug)
            return

        # Check if available, Maybe something have changed
        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()

        # Update instance
        last_state = await addon.state()
        try:
            await addon.instance.update(store.version, store.image)
        except DockerAPIError:
            raise AddonsError() from None
        self.data.update(store)

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        # restore state
        if last_state == STATE_STARTED:
            await addon.start()

    async def rebuild(self, slug: str) -> None:
        """Perform a rebuild of local build add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)

        # Check if a rebuild is possible now
        if addon.version != store.version:
            _LOGGER.error("Version changed, use Update instead Rebuild")
            raise AddonsError()
        if not addon.need_build:
            _LOGGER.error("Can't rebuild a image based add-on")
            raise AddonsNotSupportedError()

        # remove docker container but not addon config
        last_state = await addon.state()
        try:
            await addon.instance.remove()
            await addon.instance.install(addon.version)
        except DockerAPIError:
            raise AddonsError() from None
        else:
            self.data.update(store)

        # restore state
        if last_state == STATE_STARTED:
            await addon.start()

    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
        if slug not in self.local:
            _LOGGER.debug("Add-on %s is not local available for restore")
            addon = Addon(self.coresys, slug)
        else:
            _LOGGER.debug("Add-on %s is local available for restore")
            addon = self.local[slug]

        await addon.restore(tar_file)

        # Check if new
        if slug in self.local:
            return

        _LOGGER.info("Detect new Add-on after restore %s", slug)
        self.local[slug] = addon
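The heart of this refactor is the split of the old `addons_obj` map into `local` (installed) and `store` (available) maps, with `get()` preferring the local entry. A self-contained sketch of that lookup priority, with placeholder values standing in for the real `Addon`/`AddonStore` objects:

```python
# Minimal illustration of AddonManager.get(): local entries shadow the store.
from typing import Dict, Optional

local: Dict[str, str] = {"core_ssh": "installed Addon"}
store: Dict[str, str] = {"core_ssh": "AddonStore entry",
                         "core_mosquitto": "AddonStore entry"}

def get(addon_slug: str) -> Optional[str]:
    """Prio: 1 - Local, 2 - Store (mirrors the docstring above)."""
    if addon_slug in local:
        return local[addon_slug]
    return store.get(addon_slug)

assert get("core_ssh") == "installed Addon"         # local wins
assert get("core_mosquitto") == "AddonStore entry"  # store fallback
assert get("unknown") is None
```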
(File diff suppressed because it is too large.)
hassio/addons/build.py

@@ -9,34 +9,31 @@ from ..utils.json import JsonConfig
from .validate import SCHEMA_BUILD_CONFIG

if TYPE_CHECKING:
    from .addon import Addon
    from . import AnyAddon


class AddonBuild(JsonConfig, CoreSysAttributes):
    """Handle build options for add-ons."""

    def __init__(self, coresys: CoreSys, slug: str) -> None:
    def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
        """Initialize Hass.io add-on builder."""
        self.coresys: CoreSys = coresys
        self._id: str = slug
        self.addon = addon

        super().__init__(
            Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
            Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
        )

    def save_data(self):
        """Ignore save function."""

    @property
    def addon(self) -> Addon:
        """Return add-on of build data."""
        return self.sys_addons.get(self._id)
        raise RuntimeError()

    @property
    def base_image(self) -> str:
        """Base images for this add-on."""
        return self._data[ATTR_BUILD_FROM].get(
            self.sys_arch.default,
            f"homeassistant/{self.sys_arch.default}-base:latest")
            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
        )

    @property
    def squash(self) -> bool:
@@ -51,28 +48,28 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
    def get_docker_args(self, version):
        """Create a dict with Docker build arguments."""
        args = {
            'path': str(self.addon.path_location),
            'tag': f"{self.addon.image}:{version}",
            'pull': True,
            'forcerm': True,
            'squash': self.squash,
            'labels': {
                'io.hass.version': version,
                'io.hass.arch': self.sys_arch.default,
                'io.hass.type': META_ADDON,
                'io.hass.name': self._fix_label('name'),
                'io.hass.description': self._fix_label('description'),
            "path": str(self.addon.path_location),
            "tag": f"{self.addon.image}:{version}",
            "pull": True,
            "forcerm": True,
            "squash": self.squash,
            "labels": {
                "io.hass.version": version,
                "io.hass.arch": self.sys_arch.default,
                "io.hass.type": META_ADDON,
                "io.hass.name": self._fix_label("name"),
                "io.hass.description": self._fix_label("description"),
            },
            'buildargs': {
                'BUILD_FROM': self.base_image,
                'BUILD_VERSION': version,
                'BUILD_ARCH': self.sys_arch.default,
            "buildargs": {
                "BUILD_FROM": self.base_image,
                "BUILD_VERSION": version,
                "BUILD_ARCH": self.sys_arch.default,
                **self.additional_args,
            }
            },
        }

        if self.addon.url:
            args['labels']['io.hass.url'] = self.addon.url
            args["labels"]["io.hass.url"] = self.addon.url

        return args
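The dict built by `get_docker_args()` is shaped so it can be splatted straight into the Docker SDK's image build call. A hedged sketch with illustrative values (assuming docker-py, the Docker SDK for Python, is installed; `squash` is omitted here because it requires an experimental daemon):

```python
import docker  # assumption: docker-py (Docker SDK for Python) is available

client = docker.from_env()
args = {
    "path": "/data/addons/local/example",   # illustrative add-on checkout
    "tag": "local/amd64-addon-example:1.0",  # illustrative image tag
    "pull": True,
    "forcerm": True,
    "labels": {"io.hass.version": "1.0", "io.hass.arch": "amd64"},
    "buildargs": {
        "BUILD_FROM": "homeassistant/amd64-base:latest",
        "BUILD_VERSION": "1.0",
        "BUILD_ARCH": "amd64",
    },
}
# The keys above map one-to-one onto images.build() keyword arguments.
image, logs = client.images.build(**args)
```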
hassio/addons/data.py

@@ -1,38 +1,34 @@
"""Init file for Hass.io add-on data."""
from copy import deepcopy
import logging
from pathlib import Path

import voluptuous as vol
from voluptuous.humanize import humanize_error
from typing import Any, Dict

from ..const import (
    ATTR_LOCATON,
    ATTR_REPOSITORY,
    ATTR_SLUG,
    ATTR_IMAGE,
    ATTR_OPTIONS,
    ATTR_SYSTEM,
    ATTR_USER,
    ATTR_VERSION,
    FILE_HASSIO_ADDONS,
    REPOSITORY_CORE,
    REPOSITORY_LOCAL,
)
from ..coresys import CoreSysAttributes
from ..exceptions import JsonFileError
from ..utils.json import JsonConfig, read_json_file
from .utils import extract_hash_from_path
from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from ..store.addon import AddonStore
from .addon import Addon
from .validate import SCHEMA_ADDONS_FILE

_LOGGER = logging.getLogger(__name__)

Config = Dict[str, Any]


class AddonsData(JsonConfig, CoreSysAttributes):
    """Hold data for Add-ons inside Hass.io."""
    """Hold data for installed Add-ons inside Hass.io."""

    def __init__(self, coresys):
    def __init__(self, coresys: CoreSys):
        """Initialize data holder."""
        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
        self.coresys = coresys
        self._repositories = {}
        self._cache = {}
        self.coresys: CoreSys = coresys

    @property
    def user(self):
@@ -44,93 +40,34 @@ class AddonsData(JsonConfig, CoreSysAttributes):
        """Return local add-on data."""
        return self._data[ATTR_SYSTEM]

    @property
    def cache(self):
        """Return add-on data from cache/repositories."""
        return self._cache
    def install(self, addon: AddonStore) -> None:
        """Set addon as installed."""
        self.system[addon.slug] = deepcopy(addon.data)
        self.user[addon.slug] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: addon.version,
            ATTR_IMAGE: addon.image,
        }
        self.save_data()

    @property
    def repositories(self):
        """Return add-on data from repositories."""
        return self._repositories
    def uninstall(self, addon: Addon) -> None:
        """Set add-on as uninstalled."""
        self.system.pop(addon.slug, None)
        self.user.pop(addon.slug, None)
        self.save_data()

    def reload(self):
        """Read data from add-on repository."""
        self._cache = {}
        self._repositories = {}
    def update(self, addon: AddonStore) -> None:
        """Update version of add-on."""
        self.system[addon.slug] = deepcopy(addon.data)
        self.user[addon.slug].update(
            {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
        )
        self.save_data()

        # read core repository
        self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)
    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
        """Restore data to add-on."""
        self.user[slug] = deepcopy(user)
        self.system[slug] = deepcopy(system)

        # read local repository
        self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)

        # add built-in repositories information
        self._set_builtin_repositories()

        # read custom git repositories
        for repository_element in self.sys_config.path_addons_git.iterdir():
            if repository_element.is_dir():
                self._read_git_repository(repository_element)

    def _read_git_repository(self, path):
        """Process a custom repository folder."""
        slug = extract_hash_from_path(path)

        # exists repository json
        repository_file = Path(path, "repository.json")
        try:
            repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
        except JsonFileError:
            _LOGGER.warning(
                "Can't read repository information from %s", repository_file
            )
            return
        except vol.Invalid:
            _LOGGER.warning("Repository parse error %s", repository_file)
            return

        # process data
        self._repositories[slug] = repository_info
        self._read_addons_folder(path, slug)

    def _read_addons_folder(self, path, repository):
        """Read data from add-ons folder."""
        for addon in path.glob("**/config.json"):
            try:
                addon_config = read_json_file(addon)
            except JsonFileError:
                _LOGGER.warning("Can't read %s from repository %s", addon, repository)
                continue

            # validate
            try:
                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
            except vol.Invalid as ex:
                _LOGGER.warning(
                    "Can't read %s: %s", addon, humanize_error(addon_config, ex)
                )
                continue

            # Generate slug
            addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])

            # store
            addon_config[ATTR_REPOSITORY] = repository
            addon_config[ATTR_LOCATON] = str(addon.parent)
            self._cache[addon_slug] = addon_config

    def _set_builtin_repositories(self):
        """Add local built-in repository into dataset."""
        try:
            builtin_file = Path(__file__).parent.joinpath("built-in.json")
            builtin_data = read_json_file(builtin_file)
        except JsonFileError:
            _LOGGER.warning("Can't read built-in json")
            return

        # core repository
        self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]

        # local repository
        self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]
        self.user[slug][ATTR_IMAGE] = image
        self.save_data()
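Note the `deepcopy(addon.data)` in `install()` and `update()`: the installed (`system`) record must be a snapshot of the store config at install time, not a live reference that mutates when the store reloads. A tiny self-contained demonstration of that design choice:

```python
from copy import deepcopy

store_data = {"name": "Example add-on", "version": "1.0"}
system = {"local_example": deepcopy(store_data)}  # snapshot at install time

store_data["version"] = "1.1"  # the store repository moves on
assert system["local_example"]["version"] == "1.0"  # installed record unchanged
```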
hassio/addons/model.py (new file, 498 lines)

@@ -0,0 +1,498 @@
"""Init file for Hass.io add-ons."""
from distutils.version import StrictVersion
from pathlib import Path
from typing import Any, Awaitable, Dict, List, Optional

import voluptuous as vol

from ..const import (
    ATTR_APPARMOR,
    ATTR_ARCH,
    ATTR_AUDIO,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_BOOT,
    ATTR_DESCRIPTON,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
    ATTR_DISCOVERY,
    ATTR_DOCKER_API,
    ATTR_ENVIRONMENT,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
    ATTR_HASSIO_ROLE,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_API,
    ATTR_HOST_DBUS,
    ATTR_HOST_IPC,
    ATTR_HOST_NETWORK,
    ATTR_HOST_PID,
    ATTR_IMAGE,
    ATTR_INGRESS,
    ATTR_KERNEL_MODULES,
    ATTR_LEGACY,
    ATTR_LOCATON,
    ATTR_MACHINE,
    ATTR_MAP,
    ATTR_NAME,
    ATTR_OPTIONS,
    ATTR_PANEL_ADMIN,
    ATTR_PANEL_ICON,
    ATTR_PANEL_TITLE,
    ATTR_PORTS,
    ATTR_PORTS_DESCRIPTION,
    ATTR_PRIVILEGED,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_STARTUP,
    ATTR_STDIN,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_URL,
    ATTR_VERSION,
    ATTR_WEBUI,
    SECURITY_DEFAULT,
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)
from ..coresys import CoreSysAttributes
from .validate import MACHINE_ALL, RE_SERVICE, RE_VOLUME, validate_options

Data = Dict[str, Any]


class AddonModel(CoreSysAttributes):
    """Add-on Data layout."""

    slug: str = None

    @property
    def data(self) -> Data:
        """Return Add-on config/data."""
        raise NotImplementedError()

    @property
    def is_installed(self) -> bool:
        """Return True if an add-on is installed."""
        raise NotImplementedError()

    @property
    def is_detached(self) -> bool:
        """Return True if add-on is detached."""
        raise NotImplementedError()

    @property
    def available(self) -> bool:
        """Return True if this add-on is available on this platform."""
        return self._available(self.data)

    @property
    def options(self) -> Dict[str, Any]:
        """Return options with local changes."""
        return self.data[ATTR_OPTIONS]

    @property
    def boot(self) -> bool:
        """Return boot config with prio local settings."""
        return self.data[ATTR_BOOT]

    @property
    def auto_update(self) -> Optional[bool]:
        """Return if auto update is enable."""
        return None

    @property
    def name(self) -> str:
        """Return name of add-on."""
        return self.data[ATTR_NAME]

    @property
    def timeout(self) -> int:
        """Return timeout of addon for docker stop."""
        return self.data[ATTR_TIMEOUT]

    @property
    def uuid(self) -> Optional[str]:
        """Return an API token for this add-on."""
        return None

    @property
    def hassio_token(self) -> Optional[str]:
        """Return access token for Hass.io API."""
        return None

    @property
    def ingress_token(self) -> Optional[str]:
        """Return access token for Hass.io API."""
        return None

    @property
    def ingress_entry(self) -> Optional[str]:
        """Return ingress external URL."""
        return None

    @property
    def description(self) -> str:
        """Return description of add-on."""
        return self.data[ATTR_DESCRIPTON]

    @property
    def long_description(self) -> Optional[str]:
        """Return README.md as long_description."""
        readme = Path(self.path_location, "README.md")

        # If readme not exists
        if not readme.exists():
            return None

        # Return data
        with readme.open("r") as readme_file:
            return readme_file.read()

    @property
    def repository(self) -> str:
        """Return repository of add-on."""
        return self.data[ATTR_REPOSITORY]

    @property
    def latest_version(self) -> str:
        """Return latest version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def version(self) -> str:
        """Return version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def protected(self) -> bool:
        """Return if add-on is in protected mode."""
        return True

    @property
    def startup(self) -> Optional[str]:
        """Return startup type of add-on."""
        return self.data.get(ATTR_STARTUP)

    @property
    def services_role(self) -> Dict[str, str]:
        """Return dict of services with rights."""
        services_list = self.data.get(ATTR_SERVICES, [])

        services = {}
        for data in services_list:
            service = RE_SERVICE.match(data)
            services[service.group("service")] = service.group("rights")

        return services

    @property
    def discovery(self) -> List[str]:
        """Return list of discoverable components/platforms."""
        return self.data.get(ATTR_DISCOVERY, [])

    @property
    def ports_description(self) -> Optional[Dict[str, str]]:
        """Return descriptions of ports."""
        return self.data.get(ATTR_PORTS_DESCRIPTION)

    @property
    def ports(self) -> Optional[Dict[str, Optional[int]]]:
        """Return ports of add-on."""
        return self.data.get(ATTR_PORTS)

    @property
    def ingress_url(self) -> Optional[str]:
        """Return URL to ingress url."""
        return None

    @property
    def webui(self) -> Optional[str]:
        """Return URL to webui or None."""
        return self.data.get(ATTR_WEBUI)

    @property
    def ingress_port(self) -> Optional[int]:
        """Return Ingress port."""
        return None

    @property
    def panel_icon(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data[ATTR_PANEL_ICON]

    @property
    def panel_title(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data.get(ATTR_PANEL_TITLE, self.name)

    @property
    def panel_admin(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data[ATTR_PANEL_ADMIN]

    @property
    def host_network(self) -> bool:
        """Return True if add-on run on host network."""
        return self.data[ATTR_HOST_NETWORK]

    @property
    def host_pid(self) -> bool:
        """Return True if add-on run on host PID namespace."""
        return self.data[ATTR_HOST_PID]

    @property
    def host_ipc(self) -> bool:
        """Return True if add-on run on host IPC namespace."""
        return self.data[ATTR_HOST_IPC]

    @property
    def host_dbus(self) -> bool:
        """Return True if add-on run on host D-BUS."""
        return self.data[ATTR_HOST_DBUS]

    @property
    def devices(self) -> Optional[List[str]]:
        """Return devices of add-on."""
        return self.data.get(ATTR_DEVICES, [])

    @property
    def auto_uart(self) -> bool:
        """Return True if we should map all UART device."""
        return self.data[ATTR_AUTO_UART]

    @property
    def tmpfs(self) -> Optional[str]:
        """Return tmpfs of add-on."""
        return self.data.get(ATTR_TMPFS)

    @property
    def environment(self) -> Optional[Dict[str, str]]:
        """Return environment of add-on."""
        return self.data.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self) -> List[str]:
        """Return list of privilege."""
        return self.data.get(ATTR_PRIVILEGED, [])

    @property
    def apparmor(self) -> str:
        """Return True if AppArmor is enabled."""
        if not self.data.get(ATTR_APPARMOR):
            return SECURITY_DISABLE
        elif self.sys_host.apparmor.exists(self.slug):
            return SECURITY_PROFILE
        return SECURITY_DEFAULT

    @property
    def legacy(self) -> bool:
        """Return if the add-on don't support Home Assistant labels."""
        return self.data[ATTR_LEGACY]

    @property
    def access_docker_api(self) -> bool:
        """Return if the add-on need read-only Docker API access."""
        return self.data[ATTR_DOCKER_API]

    @property
    def access_hassio_api(self) -> bool:
        """Return True if the add-on access to Hass.io REASTful API."""
        return self.data[ATTR_HASSIO_API]

    @property
    def access_homeassistant_api(self) -> bool:
        """Return True if the add-on access to Home Assistant API proxy."""
        return self.data[ATTR_HOMEASSISTANT_API]

    @property
    def hassio_role(self) -> str:
        """Return Hass.io role for API."""
        return self.data[ATTR_HASSIO_ROLE]

    @property
    def with_stdin(self) -> bool:
        """Return True if the add-on access use stdin input."""
        return self.data[ATTR_STDIN]

    @property
    def with_ingress(self) -> bool:
        """Return True if the add-on access support ingress."""
        return self.data[ATTR_INGRESS]

    @property
    def ingress_panel(self) -> Optional[bool]:
        """Return True if the add-on access support ingress."""
        return None

    @property
    def with_gpio(self) -> bool:
        """Return True if the add-on access to GPIO interface."""
        return self.data[ATTR_GPIO]

    @property
    def with_kernel_modules(self) -> bool:
        """Return True if the add-on access to kernel modules."""
        return self.data[ATTR_KERNEL_MODULES]

    @property
    def with_full_access(self) -> bool:
        """Return True if the add-on want full access to hardware."""
        return self.data[ATTR_FULL_ACCESS]

    @property
    def with_devicetree(self) -> bool:
        """Return True if the add-on read access to devicetree."""
        return self.data[ATTR_DEVICETREE]

    @property
    def access_auth_api(self) -> bool:
        """Return True if the add-on access to login/auth backend."""
        return self.data[ATTR_AUTH_API]

    @property
    def with_audio(self) -> bool:
        """Return True if the add-on access to audio."""
        return self.data[ATTR_AUDIO]

    @property
    def homeassistant_version(self) -> Optional[str]:
        """Return min Home Assistant version they needed by Add-on."""
        return self.data.get(ATTR_HOMEASSISTANT)

    @property
    def url(self) -> Optional[str]:
        """Return URL of add-on."""
        return self.data.get(ATTR_URL)

    @property
    def with_icon(self) -> bool:
        """Return True if an icon exists."""
        return self.path_icon.exists()

    @property
    def with_logo(self) -> bool:
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def with_changelog(self) -> bool:
        """Return True if a changelog exists."""
        return self.path_changelog.exists()

    @property
    def supported_arch(self) -> List[str]:
        """Return list of supported arch."""
        return self.data[ATTR_ARCH]

    @property
    def supported_machine(self) -> List[str]:
        """Return list of supported machine."""
        return self.data.get(ATTR_MACHINE, MACHINE_ALL)

    @property
    def image(self) -> str:
        """Generate image name from data."""
        return self._image(self.data)

    @property
    def need_build(self) -> bool:
        """Return True if this add-on need a local build."""
        return ATTR_IMAGE not in self.data

    @property
    def map_volumes(self) -> Dict[str, str]:
        """Return a dict of {volume: policy} from add-on."""
        volumes = {}
        for volume in self.data[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or "ro"

        return volumes

    @property
    def path_location(self) -> Path:
        """Return path to this add-on."""
        return Path(self.data[ATTR_LOCATON])

    @property
    def path_icon(self) -> Path:
        """Return path to add-on icon."""
        return Path(self.path_location, "icon.png")

    @property
    def path_logo(self) -> Path:
        """Return path to add-on logo."""
        return Path(self.path_location, "logo.png")

    @property
    def path_changelog(self) -> Path:
        """Return path to add-on changelog."""
        return Path(self.path_location, "CHANGELOG.md")

    @property
    def path_apparmor(self) -> Path:
        """Return path to custom AppArmor profile."""
        return Path(self.path_location, "apparmor.txt")

    @property
    def schema(self) -> vol.Schema:
        """Create a schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    def __eq__(self, other):
        """Compaired add-on objects."""
        if not isinstance(other, AddonModel):
            return False
        return self.slug == other.slug

    def _available(self, config) -> bool:
        """Return True if this add-on is available on this platform."""
        # Architecture
        if not self.sys_arch.is_supported(config[ATTR_ARCH]):
            return False

        # Machine / Hardware
        machine = config.get(ATTR_MACHINE) or MACHINE_ALL
        if self.sys_machine not in machine:
            return False

        # Home Assistant
        version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
        if StrictVersion(self.sys_homeassistant.version) < StrictVersion(version):
            return False

        return True

    def _image(self, config) -> str:
        """Generate image name from data."""
        # Repository with Dockerhub images
        if ATTR_IMAGE in config:
            arch = self.sys_arch.match(config[ATTR_ARCH])
            return config[ATTR_IMAGE].format(arch=arch)

        # local build
        return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"

    def install(self) -> Awaitable[None]:
        """Install this add-on."""
        return self.sys_addons.install(self.slug)

    def uninstall(self) -> Awaitable[None]:
        """Uninstall this add-on."""
        return self.sys_addons.uninstall(self.slug)

    def update(self) -> Awaitable[None]:
        """Update this add-on."""
        return self.sys_addons.update(self.slug)

    def rebuild(self) -> Awaitable[None]:
        """Rebuild this add-on."""
        return self.sys_addons.rebuild(self.slug)
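The `_image()` helper above encodes the naming convention for add-on images: an explicit `image` key is treated as a Docker Hub template with `{arch}` substituted, otherwise a local build name is derived from the repository and slug. A simplified, self-contained re-implementation for illustration (a fixed architecture string stands in for the real `sys_arch` handling):

```python
from typing import Any, Dict

def image_name(config: Dict[str, Any], arch: str = "amd64") -> str:
    """Simplified sketch of AddonModel._image(); arch matching is reduced."""
    if "image" in config:
        # Repository ships prebuilt Docker Hub images
        return config["image"].format(arch=arch)
    # No image key: the add-on needs a local build
    return f"{config['repository']}/{arch}-addon-{config['slug']}"

assert image_name({"image": "acme/{arch}-myaddon"}) == "acme/amd64-myaddon"
assert image_name({"repository": "local", "slug": "myaddon"}) == "local/amd64-addon-myaddon"
```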
hassio/addons/utils.py

@@ -2,10 +2,8 @@
from __future__ import annotations

import asyncio
import hashlib
import logging
from pathlib import Path
import re
from typing import TYPE_CHECKING

from ..const import (
@@ -20,16 +18,14 @@ from ..const import (
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)
from ..exceptions import AddonsNotSupportedError

if TYPE_CHECKING:
    from .addon import Addon
    from .model import AddonModel

RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)


def rating_security(addon: Addon) -> int:
def rating_security(addon: AddonModel) -> int:
    """Return 1-6 for security rating.

    1 = not secure
@@ -86,34 +82,6 @@ def rating_security(addon: Addon) -> int:
    return max(min(6, rating), 1)


def get_hash_from_repository(name: str) -> str:
    """Generate a hash from repository."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def extract_hash_from_path(path: Path) -> str:
    """Extract repo id from path."""
    repository_dir = path.parts[-1]

    if not RE_SHA1.match(repository_dir):
        return get_hash_from_repository(repository_dir)
    return repository_dir


def check_installed(method):
    """Wrap function with check if add-on is installed."""

    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            raise AddonsNotSupportedError()
        return await method(addon, *args, **kwargs)

    return wrap_check


async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
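The `get_hash_from_repository()` removed here (presumably relocated as part of the store split, though this diff only shows the removal) derives a stable eight-character slug from a repository name: the first 8 hex chars of a SHA-1 over the lower-cased string. The logic itself, runnable stand-alone:

```python
import hashlib

def get_hash_from_repository(name: str) -> str:
    """Generate a hash from repository."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]

# A custom repository URL maps to a stable folder slug:
print(get_hash_from_repository("https://github.com/hassio-addons/repository"))
```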
@@ -41,11 +41,14 @@ from ..const import (
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_INGRESS_PANEL,
|
||||
ATTR_PANEL_ADMIN,
|
||||
ATTR_PANEL_ICON,
|
||||
ATTR_PANEL_TITLE,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAINTAINER,
|
||||
ATTR_MAP,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
@@ -97,14 +100,14 @@ _LOGGER = logging.getLogger(__name__)
|
||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
|
||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
|
||||
|
||||
V_STR = 'str'
|
||||
V_INT = 'int'
|
||||
V_FLOAT = 'float'
|
||||
V_BOOL = 'bool'
|
||||
V_EMAIL = 'email'
|
||||
V_URL = 'url'
|
||||
V_PORT = 'port'
|
||||
V_MATCH = 'match'
|
||||
V_STR = "str"
|
||||
V_INT = "int"
|
||||
V_FLOAT = "float"
|
||||
V_BOOL = "bool"
|
||||
V_EMAIL = "email"
|
||||
V_URL = "url"
|
||||
V_PORT = "port"
|
||||
V_MATCH = "match"
|
||||
|
||||
RE_SCHEMA_ELEMENT = re.compile(
|
||||
r"^(?:"
|
||||
@@ -115,18 +118,28 @@ RE_SCHEMA_ELEMENT = re.compile(
|
||||
r")\??$"
|
||||
)
|
||||
|
||||
RE_DOCKER_IMAGE = re.compile(
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
||||
RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
|
||||
)
|
||||
|
||||
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
||||
|
||||
|
||||
MACHINE_ALL = [
|
||||
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
|
||||
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
|
||||
'raspberrypi3', 'raspberrypi3-64', 'tinker',
|
||||
"intel-nuc",
|
||||
"odroid-c2",
|
||||
"odroid-xu",
|
||||
"orangepi-prime",
|
||||
"qemux86",
|
||||
"qemux86-64",
|
||||
"qemuarm",
|
||||
"qemuarm-64",
|
||||
"raspberrypi",
|
||||
"raspberrypi2",
|
||||
"raspberrypi3",
|
||||
"raspberrypi3-64",
|
||||
"tinker",
|
||||
]
@@ -140,134 +153,157 @@ def _simple_startup(value):


 # pylint: disable=no-value-for-parameter
-SCHEMA_ADDON_CONFIG = vol.Schema({
-    vol.Required(ATTR_NAME): vol.Coerce(str),
-    vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Required(ATTR_SLUG): vol.Coerce(str),
-    vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
-    vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
-    vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
-    vol.Optional(ATTR_URL): vol.Url(),
-    vol.Required(ATTR_STARTUP):
-        vol.All(_simple_startup, vol.In(STARTUP_ALL)),
-    vol.Required(ATTR_BOOT):
-        vol.In([BOOT_AUTO, BOOT_MANUAL]),
-    vol.Optional(ATTR_PORTS): DOCKER_PORTS,
-    vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
-    vol.Optional(ATTR_WEBUI):
-        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
-    vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
-    vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)),
-    vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
-    vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
-    vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
-    vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
-    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
-    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
-    vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
-    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
-    vol.Optional(ATTR_TMPFS):
-        vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
-    vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
-    vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
-    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
-    vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
-    vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
-    vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
-    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
-    vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
-    vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
-    vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
-    vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
-    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
-    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
-    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
-    vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
-    vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
-    vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
-    vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
-    vol.Required(ATTR_OPTIONS): dict,
-    vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
-        vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
-            vol.Any(
-                SCHEMA_ELEMENT,
-                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
-            ),
-        ], vol.Schema({
-            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
-        }))
-    }), False),
-    vol.Optional(ATTR_IMAGE):
-        vol.Match(RE_DOCKER_IMAGE),
-    vol.Optional(ATTR_TIMEOUT, default=10):
-        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
-}, extra=vol.REMOVE_EXTRA)
+SCHEMA_ADDON_CONFIG = vol.Schema(
+    {
+        vol.Required(ATTR_NAME): vol.Coerce(str),
+        vol.Required(ATTR_VERSION): vol.Coerce(str),
+        vol.Required(ATTR_SLUG): vol.Coerce(str),
+        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
+        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
+        vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
+        vol.Optional(ATTR_URL): vol.Url(),
+        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
+        vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
+        vol.Optional(ATTR_PORTS): DOCKER_PORTS,
+        vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
+        vol.Optional(ATTR_WEBUI): vol.Match(
+            r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
+        ),
+        vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
+        vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
+            NETWORK_PORT, vol.Equal(0)
+        ),
+        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
+        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
+        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
+        vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
+        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
+        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
+        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
+        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
+        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
+        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
+        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
+        vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
+        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
+        vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
+        vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
+        vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
+        vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
+        vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
+        vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
+        vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
+        vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
+        vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
+        vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
+        vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
+        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
+        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
+        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
+        vol.Required(ATTR_OPTIONS): dict,
+        vol.Required(ATTR_SCHEMA): vol.Any(
+            vol.Schema(
+                {
+                    vol.Coerce(str): vol.Any(
+                        SCHEMA_ELEMENT,
+                        [
+                            vol.Any(
+                                SCHEMA_ELEMENT,
+                                {
+                                    vol.Coerce(str): vol.Any(
+                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
+                                    )
+                                },
+                            )
+                        ],
+                        vol.Schema(
+                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
+                        ),
+                    )
+                }
+            ),
+            False,
+        ),
+        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
+        vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
+            vol.Coerce(int), vol.Range(min=10, max=120)
+        ),
+    },
+    extra=vol.REMOVE_EXTRA,
+)


 # pylint: disable=no-value-for-parameter
 SCHEMA_REPOSITORY_CONFIG = vol.Schema({
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Optional(ATTR_URL): vol.Url(),
     vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
 }, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
-SCHEMA_BUILD_CONFIG = vol.Schema({
-    vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
-        vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
-    }),
-    vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
-    vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
-        vol.Coerce(str): vol.Coerce(str)
-    }),
-}, extra=vol.REMOVE_EXTRA)
+SCHEMA_BUILD_CONFIG = vol.Schema(
+    {
+        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
+            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
+        ),
+        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
+        vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
+            {vol.Coerce(str): vol.Coerce(str)}
+        ),
+    },
+    extra=vol.REMOVE_EXTRA,
+)


 # pylint: disable=no-value-for-parameter
-SCHEMA_ADDON_USER = vol.Schema({
-    vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_IMAGE): vol.Coerce(str),
-    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
-    vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
-    vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(str),
-    vol.Optional(ATTR_OPTIONS, default=dict): dict,
-    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
-    vol.Optional(ATTR_BOOT):
-        vol.In([BOOT_AUTO, BOOT_MANUAL]),
-    vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
-    vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
-}, extra=vol.REMOVE_EXTRA)
+SCHEMA_ADDON_USER = vol.Schema(
+    {
+        vol.Required(ATTR_VERSION): vol.Coerce(str),
+        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
+        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
+        vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
+        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
+            str
+        ),
+        vol.Optional(ATTR_OPTIONS, default=dict): dict,
+        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
+        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
+        vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
+        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
+        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
+    },
+    extra=vol.REMOVE_EXTRA,
+)


-SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
-    vol.Required(ATTR_LOCATON): vol.Coerce(str),
-    vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
-})
+SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
+    {
+        vol.Required(ATTR_LOCATON): vol.Coerce(str),
+        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
+    }
+)


-SCHEMA_ADDONS_FILE = vol.Schema({
-    vol.Optional(ATTR_USER, default=dict): {
-        vol.Coerce(str): SCHEMA_ADDON_USER,
-    },
-    vol.Optional(ATTR_SYSTEM, default=dict): {
-        vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
-    },
-})
+SCHEMA_ADDONS_FILE = vol.Schema(
+    {
+        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
+        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
+    }
+)


-SCHEMA_ADDON_SNAPSHOT = vol.Schema({
-    vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
-    vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
-    vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
-    vol.Required(ATTR_VERSION): vol.Coerce(str),
-}, extra=vol.REMOVE_EXTRA)
+SCHEMA_ADDON_SNAPSHOT = vol.Schema(
+    {
+        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
+        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
+        vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
+        vol.Required(ATTR_VERSION): vol.Coerce(str),
+    },
+    extra=vol.REMOVE_EXTRA,
+)
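How the extra=vol.REMOVE_EXTRA flag used by most of these schemas behaves in practice; plain string keys stand in for the ATTR_* constants here, so treat this as a sketch rather than the exact schema:

import voluptuous as vol

# Unknown keys are silently dropped instead of raising Invalid.
schema = vol.Schema(
    {
        vol.Required("name"): vol.Coerce(str),
        vol.Optional("url"): vol.Url(),
    },
    extra=vol.REMOVE_EXTRA,
)

config = schema({"name": "Example repository", "unexpected": "dropped"})
assert config == {"name": "Example repository"}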


 def validate_options(raw_schema):
     """Validate schema."""

     def validate(struct):
         """Create schema validator for add-ons options."""
         options = {}
@@ -293,7 +329,7 @@ def validate_options(raw_schema):
         except (IndexError, KeyError):
             raise vol.Invalid(f"Type error for {key}") from None

-        _check_missing_options(raw_schema, options, 'root')
+        _check_missing_options(raw_schema, options, "root")
         return options

     return validate
@@ -312,7 +348,7 @@ def _single_validate(typ, value, key):

     # prepare range
     range_args = {}
-    for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
+    for group_name in ("i_min", "i_max", "f_min", "f_max"):
         group_value = match.group(group_name)
         if group_value:
             range_args[group_name[2:]] = float(group_value)
@@ -332,7 +368,7 @@ def _single_validate(typ, value, key):
     elif typ.startswith(V_PORT):
         return NETWORK_PORT(value)
     elif typ.startswith(V_MATCH):
-        return vol.Match(match.group('match'))(str(value))
+        return vol.Match(match.group("match"))(str(value))

     raise vol.Invalid(f"Fatal error for {key} type {typ}")
@@ -364,8 +400,7 @@ def _nested_validate_dict(typ, data_dict, key):

         # Nested?
         if isinstance(typ[c_key], list):
-            options[c_key] = _nested_validate_list(typ[c_key][0],
-                                                   c_value, c_key)
+            options[c_key] = _nested_validate_list(typ[c_key][0], c_value, c_key)
         else:
             options[c_key] = _single_validate(typ[c_key], c_value, c_key)
@@ -377,7 +412,6 @@ def _check_missing_options(origin, exists, root):
     """Check if all options exist."""
     missing = set(origin) - set(exists)
     for miss_opt in missing:
-        if isinstance(origin[miss_opt], str) and \
-                origin[miss_opt].endswith("?"):
+        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
             continue
         raise vol.Invalid(f"Missing option {miss_opt} in {root}")

@@ -32,7 +32,8 @@ class RestAPI(CoreSysAttributes):
         self.coresys: CoreSys = coresys
         self.security: SecurityMiddleware = SecurityMiddleware(coresys)
         self.webapp: web.Application = web.Application(
-            middlewares=[self.security.token_validation])
+            middlewares=[self.security.token_validation]
+        )

         # service stuff
         self._runner: web.AppRunner = web.AppRunner(self.webapp)
@@ -60,227 +61,224 @@ class RestAPI(CoreSysAttributes):
         api_host = APIHost()
         api_host.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/host/info', api_host.info),
-            web.post('/host/reboot', api_host.reboot),
-            web.post('/host/shutdown', api_host.shutdown),
-            web.post('/host/reload', api_host.reload),
-            web.post('/host/options', api_host.options),
-            web.get('/host/services', api_host.services),
-            web.post('/host/services/{service}/stop', api_host.service_stop),
-            web.post('/host/services/{service}/start', api_host.service_start),
-            web.post('/host/services/{service}/restart',
-                     api_host.service_restart),
-            web.post('/host/services/{service}/reload',
-                     api_host.service_reload),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/host/info", api_host.info),
+                web.post("/host/reboot", api_host.reboot),
+                web.post("/host/shutdown", api_host.shutdown),
+                web.post("/host/reload", api_host.reload),
+                web.post("/host/options", api_host.options),
+                web.get("/host/services", api_host.services),
+                web.post("/host/services/{service}/stop", api_host.service_stop),
+                web.post("/host/services/{service}/start", api_host.service_start),
+                web.post("/host/services/{service}/restart", api_host.service_restart),
+                web.post("/host/services/{service}/reload", api_host.service_reload),
+            ]
+        )

     def _register_hassos(self) -> None:
         """Register HassOS functions."""
         api_hassos = APIHassOS()
         api_hassos.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/hassos/info', api_hassos.info),
-            web.post('/hassos/update', api_hassos.update),
-            web.post('/hassos/update/cli', api_hassos.update_cli),
-            web.post('/hassos/config/sync', api_hassos.config_sync),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/hassos/info", api_hassos.info),
+                web.post("/hassos/update", api_hassos.update),
+                web.post("/hassos/update/cli", api_hassos.update_cli),
+                web.post("/hassos/config/sync", api_hassos.config_sync),
+            ]
+        )

     def _register_hardware(self) -> None:
         """Register hardware functions."""
         api_hardware = APIHardware()
         api_hardware.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/hardware/info', api_hardware.info),
-            web.get('/hardware/audio', api_hardware.audio),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/hardware/info", api_hardware.info),
+                web.get("/hardware/audio", api_hardware.audio),
+            ]
+        )

     def _register_info(self) -> None:
         """Register info functions."""
         api_info = APIInfo()
         api_info.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/info', api_info.info),
-        ])
+        self.webapp.add_routes([web.get("/info", api_info.info)])

     def _register_auth(self) -> None:
         """Register auth functions."""
         api_auth = APIAuth()
         api_auth.coresys = self.coresys

-        self.webapp.add_routes([
-            web.post('/auth', api_auth.auth),
-        ])
+        self.webapp.add_routes([web.post("/auth", api_auth.auth)])

     def _register_supervisor(self) -> None:
         """Register Supervisor functions."""
         api_supervisor = APISupervisor()
         api_supervisor.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/supervisor/ping', api_supervisor.ping),
-            web.get('/supervisor/info', api_supervisor.info),
-            web.get('/supervisor/stats', api_supervisor.stats),
-            web.get('/supervisor/logs', api_supervisor.logs),
-            web.post('/supervisor/update', api_supervisor.update),
-            web.post('/supervisor/reload', api_supervisor.reload),
-            web.post('/supervisor/options', api_supervisor.options),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/supervisor/ping", api_supervisor.ping),
+                web.get("/supervisor/info", api_supervisor.info),
+                web.get("/supervisor/stats", api_supervisor.stats),
+                web.get("/supervisor/logs", api_supervisor.logs),
+                web.post("/supervisor/update", api_supervisor.update),
+                web.post("/supervisor/reload", api_supervisor.reload),
+                web.post("/supervisor/options", api_supervisor.options),
+            ]
+        )

     def _register_homeassistant(self) -> None:
         """Register Home Assistant functions."""
         api_hass = APIHomeAssistant()
         api_hass.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/homeassistant/info', api_hass.info),
-            web.get('/homeassistant/logs', api_hass.logs),
-            web.get('/homeassistant/stats', api_hass.stats),
-            web.post('/homeassistant/options', api_hass.options),
-            web.post('/homeassistant/update', api_hass.update),
-            web.post('/homeassistant/restart', api_hass.restart),
-            web.post('/homeassistant/stop', api_hass.stop),
-            web.post('/homeassistant/start', api_hass.start),
-            web.post('/homeassistant/check', api_hass.check),
-            web.post('/homeassistant/rebuild', api_hass.rebuild),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/homeassistant/info", api_hass.info),
+                web.get("/homeassistant/logs", api_hass.logs),
+                web.get("/homeassistant/stats", api_hass.stats),
+                web.post("/homeassistant/options", api_hass.options),
+                web.post("/homeassistant/update", api_hass.update),
+                web.post("/homeassistant/restart", api_hass.restart),
+                web.post("/homeassistant/stop", api_hass.stop),
+                web.post("/homeassistant/start", api_hass.start),
+                web.post("/homeassistant/check", api_hass.check),
+                web.post("/homeassistant/rebuild", api_hass.rebuild),
+            ]
+        )

     def _register_proxy(self) -> None:
         """Register Home Assistant API Proxy."""
         api_proxy = APIProxy()
         api_proxy.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/homeassistant/api/websocket', api_proxy.websocket),
-            web.get('/homeassistant/websocket', api_proxy.websocket),
-            web.get('/homeassistant/api/stream', api_proxy.stream),
-            web.post('/homeassistant/api/{path:.+}', api_proxy.api),
-            web.get('/homeassistant/api/{path:.+}', api_proxy.api),
-            web.get('/homeassistant/api/', api_proxy.api),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/homeassistant/api/websocket", api_proxy.websocket),
+                web.get("/homeassistant/websocket", api_proxy.websocket),
+                web.get("/homeassistant/api/stream", api_proxy.stream),
+                web.post("/homeassistant/api/{path:.+}", api_proxy.api),
+                web.get("/homeassistant/api/{path:.+}", api_proxy.api),
+                web.get("/homeassistant/api/", api_proxy.api),
+            ]
+        )

     def _register_addons(self) -> None:
         """Register Add-on functions."""
         api_addons = APIAddons()
         api_addons.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/addons', api_addons.list),
-            web.post('/addons/reload', api_addons.reload),
-            web.get('/addons/{addon}/info', api_addons.info),
-            web.post('/addons/{addon}/install', api_addons.install),
-            web.post('/addons/{addon}/uninstall', api_addons.uninstall),
-            web.post('/addons/{addon}/start', api_addons.start),
-            web.post('/addons/{addon}/stop', api_addons.stop),
-            web.post('/addons/{addon}/restart', api_addons.restart),
-            web.post('/addons/{addon}/update', api_addons.update),
-            web.post('/addons/{addon}/options', api_addons.options),
-            web.post('/addons/{addon}/rebuild', api_addons.rebuild),
-            web.get('/addons/{addon}/logs', api_addons.logs),
-            web.get('/addons/{addon}/icon', api_addons.icon),
-            web.get('/addons/{addon}/logo', api_addons.logo),
-            web.get('/addons/{addon}/changelog', api_addons.changelog),
-            web.post('/addons/{addon}/stdin', api_addons.stdin),
-            web.post('/addons/{addon}/security', api_addons.security),
-            web.get('/addons/{addon}/stats', api_addons.stats),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/addons", api_addons.list),
+                web.post("/addons/reload", api_addons.reload),
+                web.get("/addons/{addon}/info", api_addons.info),
+                web.post("/addons/{addon}/install", api_addons.install),
+                web.post("/addons/{addon}/uninstall", api_addons.uninstall),
+                web.post("/addons/{addon}/start", api_addons.start),
+                web.post("/addons/{addon}/stop", api_addons.stop),
+                web.post("/addons/{addon}/restart", api_addons.restart),
+                web.post("/addons/{addon}/update", api_addons.update),
+                web.post("/addons/{addon}/options", api_addons.options),
+                web.post("/addons/{addon}/rebuild", api_addons.rebuild),
+                web.get("/addons/{addon}/logs", api_addons.logs),
+                web.get("/addons/{addon}/icon", api_addons.icon),
+                web.get("/addons/{addon}/logo", api_addons.logo),
+                web.get("/addons/{addon}/changelog", api_addons.changelog),
+                web.post("/addons/{addon}/stdin", api_addons.stdin),
+                web.post("/addons/{addon}/security", api_addons.security),
+                web.get("/addons/{addon}/stats", api_addons.stats),
+            ]
+        )

     def _register_ingress(self) -> None:
         """Register Ingress functions."""
         api_ingress = APIIngress()
         api_ingress.coresys = self.coresys

-        self.webapp.add_routes([
-            web.post('/ingress/session', api_ingress.create_session),
-            web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
-        ])
+        self.webapp.add_routes(
+            [
+                web.post("/ingress/session", api_ingress.create_session),
+                web.get("/ingress/panels", api_ingress.panels),
+                web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
+            ]
+        )

     def _register_snapshots(self) -> None:
         """Register snapshots functions."""
         api_snapshots = APISnapshots()
         api_snapshots.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/snapshots', api_snapshots.list),
-            web.post('/snapshots/reload', api_snapshots.reload),
-            web.post('/snapshots/new/full', api_snapshots.snapshot_full),
-            web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
-            web.post('/snapshots/new/upload', api_snapshots.upload),
-            web.get('/snapshots/{snapshot}/info', api_snapshots.info),
-            web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
-            web.post('/snapshots/{snapshot}/restore/full',
-                     api_snapshots.restore_full),
-            web.post('/snapshots/{snapshot}/restore/partial',
-                     api_snapshots.restore_partial),
-            web.get('/snapshots/{snapshot}/download', api_snapshots.download),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/snapshots", api_snapshots.list),
+                web.post("/snapshots/reload", api_snapshots.reload),
+                web.post("/snapshots/new/full", api_snapshots.snapshot_full),
+                web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
+                web.post("/snapshots/new/upload", api_snapshots.upload),
+                web.get("/snapshots/{snapshot}/info", api_snapshots.info),
+                web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
+                web.post(
+                    "/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
+                ),
+                web.post(
+                    "/snapshots/{snapshot}/restore/partial",
+                    api_snapshots.restore_partial,
+                ),
+                web.get("/snapshots/{snapshot}/download", api_snapshots.download),
+            ]
+        )

     def _register_services(self) -> None:
         """Register services functions."""
         api_services = APIServices()
         api_services.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/services', api_services.list),
-            web.get('/services/{service}', api_services.get_service),
-            web.post('/services/{service}', api_services.set_service),
-            web.delete('/services/{service}', api_services.del_service),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/services", api_services.list),
+                web.get("/services/{service}", api_services.get_service),
+                web.post("/services/{service}", api_services.set_service),
+                web.delete("/services/{service}", api_services.del_service),
+            ]
+        )

     def _register_discovery(self) -> None:
         """Register discovery functions."""
         api_discovery = APIDiscovery()
         api_discovery.coresys = self.coresys

-        self.webapp.add_routes([
-            web.get('/discovery', api_discovery.list),
-            web.get('/discovery/{uuid}', api_discovery.get_discovery),
-            web.delete('/discovery/{uuid}', api_discovery.del_discovery),
-            web.post('/discovery', api_discovery.set_discovery),
-        ])
+        self.webapp.add_routes(
+            [
+                web.get("/discovery", api_discovery.list),
+                web.get("/discovery/{uuid}", api_discovery.get_discovery),
+                web.delete("/discovery/{uuid}", api_discovery.del_discovery),
+                web.post("/discovery", api_discovery.set_discovery),
+            ]
+        )

     def _register_panel(self) -> None:
         """Register panel for Home Assistant."""
         panel_dir = Path(__file__).parent.joinpath("panel")

         def create_response(panel_file):
             """Create a function to generate a response."""
             path = panel_dir.joinpath(f"{panel_file!s}.html")
             return lambda request: web.FileResponse(path)

         # This route is for backwards compatibility with HA < 0.58
         self.webapp.add_routes(
             [web.get('/panel', create_response('hassio-main-es5'))])

         # This route is for backwards compatibility with HA 0.58 - 0.61
         self.webapp.add_routes([
             web.get('/panel_es5', create_response('hassio-main-es5')),
             web.get('/panel_latest', create_response('hassio-main-latest')),
         ])

         # This route is for backwards compatibility with HA 0.62 - 0.70
         self.webapp.add_routes([
             web.get('/app-es5/index.html', create_response('index')),
             web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
         ])

         # This route is for HA > 0.70
-        self.webapp.add_routes([web.static('/app', panel_dir)])
+        self.webapp.add_routes([web.static("/app", panel_dir)])

     async def start(self) -> None:
         """Run RESTful API webserver."""
         await self._runner.setup()
         self._site = web.TCPSite(
-            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)
+            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
+        )

         try:
             await self._site.start()
         except OSError as err:
-            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
-                          err)
+            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
         else:
             _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
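The start() coroutine above is the standard aiohttp runner pattern. A stripped-down, standalone sketch of the same flow (route table, AppRunner, TCPSite); the handler and the host/port values are illustrative:

from aiohttp import web

async def ping(request: web.Request) -> web.Response:
    """Trivial handler standing in for the real API endpoints."""
    return web.json_response({"result": "ok"})

async def serve() -> None:
    app = web.Application()
    app.add_routes([web.get("/supervisor/ping", ping)])

    # Same lifecycle as RestAPI.start(): set up the runner, then bind a site.
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, host="0.0.0.0", port=80, shutdown_timeout=5)
    await site.start()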

@@ -7,7 +7,7 @@ from aiohttp import web
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from ..addons.addon import Addon
+from ..addons import AnyAddon
 from ..addons.utils import rating_security
 from ..const import (
     ATTR_ADDONS,
@@ -44,6 +44,7 @@ from ..const import (
     ATTR_ICON,
     ATTR_INGRESS,
     ATTR_INGRESS_ENTRY,
+    ATTR_INGRESS_PANEL,
     ATTR_INGRESS_PORT,
     ATTR_INGRESS_URL,
     ATTR_INSTALLED,
@@ -81,6 +82,7 @@ from ..const import (
     CONTENT_TYPE_PNG,
     CONTENT_TYPE_TEXT,
     REQUEST_FROM,
+    STATE_NONE,
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
@@ -89,34 +91,35 @@ from .utils import api_process, api_process_raw, api_validate

 _LOGGER = logging.getLogger(__name__)

-SCHEMA_VERSION = vol.Schema({
-    vol.Optional(ATTR_VERSION): vol.Coerce(str),
-})
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

 # pylint: disable=no-value-for-parameter
-SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
-    vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
-    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
-})
+SCHEMA_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
+        vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
+        vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
+        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
+    }
+)

 # pylint: disable=no-value-for-parameter
-SCHEMA_SECURITY = vol.Schema({
-    vol.Optional(ATTR_PROTECTED): vol.Boolean(),
-})
+SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})


 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""

-    def _extract_addon(self, request: web.Request, check_installed: bool = True) -> Addon:
+    def _extract_addon(
+        self, request: web.Request, check_installed: bool = True
+    ) -> AnyAddon:
         """Return addon, throw an exception if it doesn't exist."""
-        addon_slug = request.match_info.get('addon')
+        addon_slug = request.match_info.get("addon")

         # Lookup itself
-        if addon_slug == 'self':
+        if addon_slug == "self":
             return request.get(REQUEST_FROM)

         addon = self.sys_addons.get(addon_slug)
@@ -132,57 +135,57 @@ class APIAddons(CoreSysAttributes):
     async def list(self, request: web.Request) -> Dict[str, Any]:
         """Return all add-ons or repositories."""
         data_addons = []
-        for addon in self.sys_addons.list_addons:
-            data_addons.append({
-                ATTR_NAME: addon.name,
-                ATTR_SLUG: addon.slug,
-                ATTR_DESCRIPTON: addon.description,
-                ATTR_VERSION: addon.latest_version,
-                ATTR_INSTALLED: addon.version_installed,
-                ATTR_AVAILABLE: addon.available,
-                ATTR_DETACHED: addon.is_detached,
-                ATTR_REPOSITORY: addon.repository,
-                ATTR_BUILD: addon.need_build,
-                ATTR_URL: addon.url,
-                ATTR_ICON: addon.with_icon,
-                ATTR_LOGO: addon.with_logo,
-            })
+        for addon in self.sys_addons.all:
+            data_addons.append(
+                {
+                    ATTR_NAME: addon.name,
+                    ATTR_SLUG: addon.slug,
+                    ATTR_DESCRIPTON: addon.description,
+                    ATTR_VERSION: addon.latest_version,
+                    ATTR_INSTALLED: addon.version if addon.is_installed else None,
+                    ATTR_AVAILABLE: addon.available,
+                    ATTR_DETACHED: addon.is_detached,
+                    ATTR_REPOSITORY: addon.repository,
+                    ATTR_BUILD: addon.need_build,
+                    ATTR_URL: addon.url,
+                    ATTR_ICON: addon.with_icon,
+                    ATTR_LOGO: addon.with_logo,
+                }
+            )

         data_repositories = []
-        for repository in self.sys_addons.list_repositories:
-            data_repositories.append({
-                ATTR_SLUG: repository.slug,
-                ATTR_NAME: repository.name,
-                ATTR_SOURCE: repository.source,
-                ATTR_URL: repository.url,
-                ATTR_MAINTAINER: repository.maintainer,
-            })
+        for repository in self.sys_store.all:
+            data_repositories.append(
+                {
+                    ATTR_SLUG: repository.slug,
+                    ATTR_NAME: repository.name,
+                    ATTR_SOURCE: repository.source,
+                    ATTR_URL: repository.url,
+                    ATTR_MAINTAINER: repository.maintainer,
+                }
+            )

-        return {
-            ATTR_ADDONS: data_addons,
-            ATTR_REPOSITORIES: data_repositories,
-        }
+        return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}
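For orientation, the shape the list handler produces, assuming the ATTR_* constants map to the lowercase keys used elsewhere in this API; all values are invented for illustration:

# Hypothetical GET /addons response body (one add-on, one repository).
{
    "addons": [
        {
            "name": "Example add-on",
            "slug": "example_addon",
            "version": "1.1",    # latest available version
            "installed": "1.0",  # None when the add-on is not installed
            "repository": "core",
        }
    ],
    "repositories": [
        {"slug": "core", "name": "Official add-ons", "maintainer": "Home Assistant"}
    ],
}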

     @api_process
     async def reload(self, request: web.Request) -> None:
-        """Reload all add-on data."""
-        await asyncio.shield(self.sys_addons.reload())
+        """Reload all add-on data from store."""
+        await asyncio.shield(self.sys_store.reload())

     @api_process
     async def info(self, request: web.Request) -> Dict[str, Any]:
         """Return add-on information."""
         addon = self._extract_addon(request, check_installed=False)

-        return {
+        data = {
             ATTR_NAME: addon.name,
             ATTR_SLUG: addon.slug,
             ATTR_DESCRIPTON: addon.description,
             ATTR_LONG_DESCRIPTION: addon.long_description,
-            ATTR_VERSION: addon.version_installed,
-            ATTR_AUTO_UPDATE: addon.auto_update,
+            ATTR_AUTO_UPDATE: None,
             ATTR_REPOSITORY: addon.repository,
+            ATTR_VERSION: None,
             ATTR_LAST_VERSION: addon.latest_version,
-            ATTR_STATE: await addon.state(),
             ATTR_PROTECTED: addon.protected,
             ATTR_RATING: rating_security(addon),
             ATTR_BOOT: addon.boot,
@@ -191,6 +194,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_MACHINE: addon.supported_machine,
             ATTR_HOMEASSISTANT: addon.homeassistant_version,
             ATTR_URL: addon.url,
+            ATTR_STATE: STATE_NONE,
             ATTR_DETACHED: addon.is_detached,
             ATTR_AVAILABLE: addon.available,
             ATTR_BUILD: addon.need_build,
@@ -207,8 +211,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
             ATTR_CHANGELOG: addon.with_changelog,
-            ATTR_WEBUI: addon.webui,
             ATTR_STDIN: addon.with_stdin,
+            ATTR_WEBUI: None,
             ATTR_HASSIO_API: addon.access_hassio_api,
             ATTR_HASSIO_ROLE: addon.hassio_role,
             ATTR_AUTH_API: addon.access_auth_api,
@@ -218,25 +222,45 @@ class APIAddons(CoreSysAttributes):
             ATTR_DEVICETREE: addon.with_devicetree,
             ATTR_DOCKER_API: addon.access_docker_api,
             ATTR_AUDIO: addon.with_audio,
-            ATTR_AUDIO_INPUT: addon.audio_input,
-            ATTR_AUDIO_OUTPUT: addon.audio_output,
+            ATTR_AUDIO_INPUT: None,
+            ATTR_AUDIO_OUTPUT: None,
             ATTR_SERVICES: _pretty_services(addon),
             ATTR_DISCOVERY: addon.discovery,
-            ATTR_IP_ADDRESS: str(addon.ip_address),
+            ATTR_IP_ADDRESS: None,
             ATTR_INGRESS: addon.with_ingress,
-            ATTR_INGRESS_ENTRY: addon.ingress_entry,
-            ATTR_INGRESS_URL: addon.ingress_url,
-            ATTR_INGRESS_PORT: addon.ingress_port,
+            ATTR_INGRESS_ENTRY: None,
+            ATTR_INGRESS_URL: None,
+            ATTR_INGRESS_PORT: None,
+            ATTR_INGRESS_PANEL: None,
         }

+        if addon.is_installed:
+            data.update(
+                {
+                    ATTR_STATE: await addon.state(),
+                    ATTR_WEBUI: addon.webui,
+                    ATTR_INGRESS_ENTRY: addon.ingress_entry,
+                    ATTR_INGRESS_URL: addon.ingress_url,
+                    ATTR_INGRESS_PORT: addon.ingress_port,
+                    ATTR_INGRESS_PANEL: addon.ingress_panel,
+                    ATTR_AUDIO_INPUT: addon.audio_input,
+                    ATTR_AUDIO_OUTPUT: addon.audio_output,
+                    ATTR_AUTO_UPDATE: addon.auto_update,
+                    ATTR_IP_ADDRESS: str(addon.ip_address),
+                    ATTR_VERSION: addon.version,
+                }
+            )
+
+        return data
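The pattern above is worth a second look: every runtime-only field is first set to a safe placeholder (None or STATE_NONE) and then overwritten only when addon.is_installed. A tiny standalone sketch of the same idiom, with invented values:

def describe(installed: bool) -> dict:
    # Defaults that are valid for a not-yet-installed add-on.
    data = {"version": None, "state": "none", "webui": None}
    if installed:
        # Runtime values exist only once the container is present.
        data.update(
            {"version": "1.0", "state": "started", "webui": "http://172.30.33.1:8099"}
        )
    return data

assert describe(False)["version"] is None
assert describe(True)["state"] == "started"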

     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
         addon = self._extract_addon(request)

-        addon_schema = SCHEMA_OPTIONS.extend({
-            vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
-        })
+        addon_schema = SCHEMA_OPTIONS.extend(
+            {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
+        )
         body = await api_validate(addon_schema, request)

         if ATTR_OPTIONS in body:
@@ -251,8 +275,11 @@ class APIAddons(CoreSysAttributes):
             addon.audio_input = body[ATTR_AUDIO_INPUT]
         if ATTR_AUDIO_OUTPUT in body:
             addon.audio_output = body[ATTR_AUDIO_OUTPUT]
+        if ATTR_INGRESS_PANEL in body:
+            addon.ingress_panel = body[ATTR_INGRESS_PANEL]
+            await self.sys_ingress.update_hass_panel(addon)

-        addon.save_data()
+        addon.save_persist()
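A plausible request body for this endpoint, matching the keys SCHEMA_OPTIONS accepts after this change; the "80/tcp" port-mapping format is assumed from DOCKER_PORTS, and the nested "options" dict is validated against the add-on's own schema:

# Hypothetical POST /addons/{addon}/options payload.
payload = {
    "boot": "auto",
    "network": {"80/tcp": 8080},
    "ingress_panel": True,
    "options": {"ssl": False},
}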

     @api_process
     async def security(self, request: web.Request) -> None:
@@ -319,7 +346,7 @@ class APIAddons(CoreSysAttributes):
         """Update add-on."""
         addon = self._extract_addon(request)

-        if addon.latest_version == addon.version_installed:
+        if addon.latest_version == addon.version:
             raise APIError("No update available!")

         return asyncio.shield(addon.update())
@@ -352,7 +379,7 @@ class APIAddons(CoreSysAttributes):
         if not addon.with_icon:
             raise APIError("No icon found!")

-        with addon.path_icon.open('rb') as png:
+        with addon.path_icon.open("rb") as png:
             return png.read()

     @api_process_raw(CONTENT_TYPE_PNG)
@@ -362,7 +389,7 @@ class APIAddons(CoreSysAttributes):
         if not addon.with_logo:
             raise APIError("No logo found!")

-        with addon.path_logo.open('rb') as png:
+        with addon.path_logo.open("rb") as png:
             return png.read()

     @api_process_raw(CONTENT_TYPE_TEXT)
@@ -372,7 +399,7 @@ class APIAddons(CoreSysAttributes):
         if not addon.with_changelog:
             raise APIError("No changelog found!")

-        with addon.path_changelog.open('r') as changelog:
+        with addon.path_changelog.open("r") as changelog:
             return changelog.read()

     @api_process
@@ -386,15 +413,15 @@ class APIAddons(CoreSysAttributes):
         await asyncio.shield(addon.write_stdin(data))


-def _pretty_devices(addon: Addon) -> List[str]:
+def _pretty_devices(addon: AnyAddon) -> List[str]:
     """Return a simplified device list."""
     dev_list = addon.devices
     if not dev_list:
         return None
-    return [row.split(':')[0] for row in dev_list]
+    return [row.split(":")[0] for row in dev_list]


-def _pretty_services(addon: Addon) -> List[str]:
+def _pretty_services(addon: AnyAddon) -> List[str]:
     """Return a simplified services role list."""
     services = []
     for name, access in addon.services_role.items():
@@ -29,8 +29,8 @@ class APIAuth(CoreSysAttributes):

         Return a coroutine.
         """
-        username = data.get('username') or data.get('user')
-        password = data.get('password')
+        username = data.get("username") or data.get("user")
+        password = data.get("password")

         return self.sys_auth.check_login(addon, username, password)

@@ -56,6 +56,6 @@ class APIAuth(CoreSysAttributes):
             data = await request.post()
             return await self._process_dict(request, addon, data)

-        raise HTTPUnauthorized(headers={
-            WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
-        })
+        raise HTTPUnauthorized(
+            headers={WWW_AUTHENTICATE: 'Basic realm="Hass.io Authentication"'}
+        )

@@ -3,7 +3,13 @@ import logging

 from .utils import api_process
 from ..const import (
-    ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
+    ATTR_SERIAL,
+    ATTR_DISK,
+    ATTR_GPIO,
+    ATTR_AUDIO,
+    ATTR_INPUT,
+    ATTR_OUTPUT,
+)
 from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)
@@ -41,8 +41,8 @@ _LOGGER = logging.getLogger(__name__)
 SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_BOOT): vol.Boolean(),
-        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(vol.Coerce(str)),
-        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Any(None, DOCKER_IMAGE),
+        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(DOCKER_IMAGE),
+        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_PORT): NETWORK_PORT,
         vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_SSL): vol.Boolean(),
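The two vol.Inclusive markers share the "custom_hass" group, so a custom image and its pinned version must be supplied together or not at all; only their value validators were swapped in this hunk. A sketch with plain keys in place of ATTR_IMAGE/ATTR_LAST_VERSION:

import voluptuous as vol

schema = vol.Schema(
    {
        vol.Inclusive("image", "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Inclusive("last_version", "custom_hass"): vol.Maybe(vol.Coerce(str)),
    }
)

schema({})  # valid: neither key present
schema({"image": "org/homeassistant", "last_version": "0.91.1"})  # valid: both
try:
    schema({"image": "org/homeassistant"})  # invalid: inclusion group incomplete
except vol.MultipleInvalid:
    pass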

@@ -106,6 +106,7 @@ class APIHomeAssistant(CoreSysAttributes):

         if ATTR_REFRESH_TOKEN in body:
             self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
+            self.sys_homeassistant.api_password = None

         self.sys_homeassistant.save_data()
@@ -6,18 +6,25 @@ import voluptuous as vol

 from .utils import api_process, api_validate
 from ..const import (
-    ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
-    ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
-    ATTR_SERVICES, ATTR_CPE)
+    ATTR_HOSTNAME,
+    ATTR_FEATURES,
+    ATTR_KERNEL,
+    ATTR_OPERATING_SYSTEM,
+    ATTR_CHASSIS,
+    ATTR_DEPLOYMENT,
+    ATTR_STATE,
+    ATTR_NAME,
+    ATTR_DESCRIPTON,
+    ATTR_SERVICES,
+    ATTR_CPE,
+)
 from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

-SERVICE = 'service'
+SERVICE = "service"

-SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
-})
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})


 class APIHost(CoreSysAttributes):
@@ -44,7 +51,8 @@ class APIHost(CoreSysAttributes):
         # hostname
         if ATTR_HOSTNAME in body:
             await asyncio.shield(
-                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))
+                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME])
+            )

     @api_process
     def reboot(self, request):
@@ -66,15 +74,15 @@ class APIHost(CoreSysAttributes):
         """Return list of available services."""
         services = []
         for unit in self.sys_host.services:
-            services.append({
-                ATTR_NAME: unit.name,
-                ATTR_DESCRIPTON: unit.description,
-                ATTR_STATE: unit.state,
-            })
+            services.append(
+                {
+                    ATTR_NAME: unit.name,
+                    ATTR_DESCRIPTON: unit.description,
+                    ATTR_STATE: unit.state,
+                }
+            )

-        return {
-            ATTR_SERVICES: services
-        }
+        return {ATTR_SERVICES: services}

     @api_process
     def service_start(self, request):
@@ -14,7 +14,17 @@ from aiohttp.web_exceptions import (
 from multidict import CIMultiDict, istr

 from ..addons.addon import Addon
-from ..const import ATTR_SESSION, HEADER_TOKEN, REQUEST_FROM, COOKIE_INGRESS
+from ..const import (
+    ATTR_ADMIN,
+    ATTR_ICON,
+    ATTR_SESSION,
+    ATTR_TITLE,
+    ATTR_PANELS,
+    ATTR_ENABLE,
+    COOKIE_INGRESS,
+    HEADER_TOKEN,
+    REQUEST_FROM,
+)
 from ..coresys import CoreSysAttributes
 from .utils import api_process
@@ -45,6 +55,20 @@ class APIIngress(CoreSysAttributes):
         """Create URL to container."""
         return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"

+    @api_process
+    async def panels(self, request: web.Request) -> Dict[str, Any]:
+        """Create a list of panel data."""
+        addons = {}
+        for addon in self.sys_ingress.addons:
+            addons[addon.slug] = {
+                ATTR_TITLE: addon.panel_title,
+                ATTR_ICON: addon.panel_icon,
+                ATTR_ADMIN: addon.panel_admin,
+                ATTR_ENABLE: addon.ingress_panel,
+            }
+
+        return {ATTR_PANELS: addons}
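Assuming the ATTR_* constants resolve to the lowercase names used here, a single ingress-enabled add-on would yield roughly this panels payload; the slug and all values are invented for illustration:

# Hypothetical GET /ingress/panels response body.
{
    "panels": {
        "a0d7b954_example": {
            "title": "Example",
            "icon": "mdi:puzzle",
            "admin": True,
            "enable": False,
        }
    }
}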

     @api_process
     async def create_session(self, request: web.Request) -> Dict[str, Any]:
         """Create a new session."""
@@ -134,7 +158,12 @@ class APIIngress(CoreSysAttributes):
         source_header = _init_header(request, addon)

         async with self.sys_websession.request(
-            request.method, url, headers=source_header, params=request.query, data=data
+            request.method,
+            url,
+            headers=source_header,
+            params=request.query,
+            allow_redirects=False,
+            data=data,
         ) as result:
             headers = _response_header(result)
@@ -209,8 +238,8 @@ def _is_websocket(request: web.Request) -> bool:
     headers = request.headers

     if (
-        headers.get(hdrs.CONNECTION) == "Upgrade"
-        and headers.get(hdrs.UPGRADE) == "websocket"
+        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
+        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
     ):
         return True
     return False
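The rewritten check is deliberately looser than strict equality: Connection may carry several comma-separated tokens and header values are case-insensitive, both of which the old == comparison missed. A minimal demonstration with an illustrative header set:

# A proxied upgrade request often looks like this:
headers = {"Connection": "keep-alive, Upgrade", "Upgrade": "WebSocket"}

# Old check: fails, since the value is neither exactly "Upgrade"
# nor an all-lowercase "websocket".
assert not (headers.get("Connection") == "Upgrade"
            and headers.get("Upgrade") == "websocket")

# New check: succeeds.
assert ("upgrade" in headers.get("Connection", "").lower()
        and headers.get("Upgrade", "").lower() == "websocket")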
@@ -1 +1 @@
-(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{102:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(121),i=t.n(e),o=t(123),u=t.n(o),a=i.a,c=u.a}}]);
+(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{102:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(124),i=t.n(e),o=t(126),u=t.n(o),a=i.a,c=u.a}}]);

hassio/api/panel/chunk.69dd3afa8a315523db98.js
@@ -34,27 +34,6 @@ part of the polymer project is also subject to an additional IP rights grant
 found at http://polymer.github.io/PATENTS.txt
 */

-/**
-@license
-Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
-This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
-The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
-The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
-Code distributed by Google as part of the polymer project is also
-subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
-*/
-
-/**
-@license
-Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
-This code may only be used under the BSD style license found at
-http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
-http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
-found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
-part of the polymer project is also subject to an additional IP rights grant
-found at http://polymer.github.io/PATENTS.txt
-*/
-
 /**
 @license
 Copyright 2018 Google Inc. All Rights Reserved.
@@ -133,6 +112,27 @@ and limitations under the License.
  * THE SOFTWARE.
  */

+/**
+@license
+Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
+This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
+The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
+The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
+Code distributed by Google as part of the polymer project is also
+subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
+*/
+
+/**
+@license
+Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
+This code may only be used under the BSD style license found at
+http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
+http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
+found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
+part of the polymer project is also subject to an additional IP rights grant
+found at http://polymer.github.io/PATENTS.txt
+*/
+
 /**
  * @license
  * Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
@@ -178,3 +178,12 @@ found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
 part of the polymer project is also subject to an additional IP rights grant
 found at http://polymer.github.io/PATENTS.txt
 */
+
+/*!
+ * Fuse.js v3.4.4 - Lightweight fuzzy-search (http://fusejs.io)
+ *
+ * Copyright (c) 2012-2017 Kirollos Risk (http://kiro.me)
+ * All Rights Reserved. Apache Software License 2.0
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
BIN
hassio/api/panel/chunk.69dd3afa8a315523db98.js.gz
Normal file
BIN
hassio/api/panel/chunk.69dd3afa8a315523db98.js.gz
Normal file
Binary file not shown.
@@ -1 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.510634470d399e194ace.js","sourceRoot":""}
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.69dd3afa8a315523db98.js","sourceRoot":""}
|
Binary file not shown.
Binary file not shown.
3
hassio/api/panel/chunk.807ac4267d577a6403cd.js
Normal file
3
hassio/api/panel/chunk.807ac4267d577a6403cd.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.807ac4267d577a6403cd.js.gz
Normal file
BIN
hassio/api/panel/chunk.807ac4267d577a6403cd.js.gz
Normal file
Binary file not shown.
@@ -1 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.564a2f7b1c38ddaa4ce0.js","sourceRoot":""}
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.807ac4267d577a6403cd.js","sourceRoot":""}
|
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.8c6a9e6fd2862fc2fd9f.js.gz
Normal file
BIN
hassio/api/panel/chunk.8c6a9e6fd2862fc2fd9f.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.a1122a4f9ebd72cb73ff.js.gz
Normal file
BIN
hassio/api/panel/chunk.a1122a4f9ebd72cb73ff.js.gz
Normal file
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.a7e5fb452cd1b3a5faef.js","sourceRoot":""}
|
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.b060a768bba881c3480a.js.gz
Normal file
BIN
hassio/api/panel/chunk.b060a768bba881c3480a.js.gz
Normal file
Binary file not shown.
@@ -1 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.659084fef4e3b7b66a76.js","sourceRoot":""}
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.b060a768bba881c3480a.js","sourceRoot":""}
|
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
1
hassio/api/panel/chunk.bef066214c930e19d0e8.js
Normal file
1
hassio/api/panel/chunk.bef066214c930e19d0e8.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.bef066214c930e19d0e8.js.gz
Normal file
BIN
hassio/api/panel/chunk.bef066214c930e19d0e8.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.cff54ae25a1e80506a71.js.gz
Normal file
BIN
hassio/api/panel/chunk.cff54ae25a1e80506a71.js.gz
Normal file
Binary file not shown.
@@ -1 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.6e9c87e51920a9c354e5.js","sourceRoot":""}
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.cff54ae25a1e80506a71.js","sourceRoot":""}
|
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.d1638e8b6cd63427acdd.js.gz
Normal file
BIN
hassio/api/panel/chunk.d1638e8b6cd63427acdd.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.d1638e8b6cd63427acdd.js.map
Normal file
1
hassio/api/panel/chunk.d1638e8b6cd63427acdd.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.d1638e8b6cd63427acdd.js","sourceRoot":""}
|
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.f15d7f41c0d302cbbc7a.js","sourceRoot":""}
|
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.fa1d90049d43bcf36e42.js.gz
Normal file
BIN
hassio/api/panel/chunk.fa1d90049d43bcf36e42.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.fa1d90049d43bcf36e42.js.map
Normal file
1
hassio/api/panel/chunk.fa1d90049d43bcf36e42.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.fa1d90049d43bcf36e42.js","sourceRoot":""}
|
1
hassio/api/panel/chunk.ff337d8fd6f580702170.js
Normal file
1
hassio/api/panel/chunk.ff337d8fd6f580702170.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.ff337d8fd6f580702170.js.gz
Normal file
BIN
hassio/api/panel/chunk.ff337d8fd6f580702170.js.gz
Normal file
Binary file not shown.
@@ -1 +1 @@
|
||||
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,u=[];c<a.length;c++)o=a[c],r[o]&&u.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(f&&f(n);u.length;)u.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"564a2f7b1c38ddaa4ce0",1:"659084fef4e3b7b66a76",2:"510634470d399e194ace",3:"f15d7f41c0d302cbbc7a",5:"5d31a1778f717ac8b063",6:"b60fb48c5280275dd7e2",7:"3a63ad36bccf4ea567fa",8:"a571dfa106202cc57af6",9:"a7e5fb452cd1b3a5faef",10:"b3340b3df270d20af4a1",11:"6e9c87e51920a9c354e5",12:"083a9da4ddb0a000aec4",13:"739b67c99ab56cdbd75d"}[e]+".js"}(e),i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var f=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(9),t.e(6)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
|
||||
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],f=0,c=[];f<a.length;f++)o=a[f],r[o]&&c.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);c.length;)c.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"cff54ae25a1e80506a71",1:"fa1d90049d43bcf36e42",2:"d1638e8b6cd63427acdd",3:"807ac4267d577a6403cd",5:"a1122a4f9ebd72cb73ff",6:"bef066214c930e19d0e8",7:"0f38aa06224ef471d7a4",8:"8c6a9e6fd2862fc2fd9f",9:"69dd3afa8a315523db98",10:"320d5a8af6741fdbfaaf",11:"b060a768bba881c3480a",12:"ff337d8fd6f580702170",13:"439f972b31b285e49c06"}[e]+".js"}(e),i=function(n){f.onerror=f.onload=null,clearTimeout(u);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var u=setTimeout(function(){i({type:"timeout",target:f})},12e4);f.onerror=f.onload=i,document.head.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var f=0;f<a.length;f++)n(a[f]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(9),t.e(6)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
Several binary files are not shown, and several generated files are suppressed because one or more lines are too long.
@@ -12,8 +12,7 @@ import async_timeout
 from ..const import HEADER_HA_ACCESS
 from ..coresys import CoreSysAttributes
-from ..exceptions import (
-    HomeAssistantAuthError, HomeAssistantAPIError, APIError)
+from ..exceptions import HomeAssistantAuthError, HomeAssistantAPIError, APIError
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -25,7 +24,7 @@ class APIProxy(CoreSysAttributes):
         """Check the Hass.io token."""
         if AUTHORIZATION in request.headers:
             bearer = request.headers[AUTHORIZATION]
-            hassio_token = bearer.split(' ')[-1]
+            hassio_token = bearer.split(" ")[-1]
         else:
             hassio_token = request.headers.get(HEADER_HA_ACCESS)
 
@@ -54,10 +53,11 @@ class APIProxy(CoreSysAttributes):
             content_type = None
 
         async with self.sys_homeassistant.make_request(
-                request.method.lower(), f'api/{path}',
-                content_type=content_type,
-                data=data,
-                timeout=timeout,
+            request.method.lower(),
+            f"api/{path}",
+            content_type=content_type,
+            data=data,
+            timeout=timeout,
         ) as resp:
             yield resp
             return
@@ -78,7 +78,7 @@ class APIProxy(CoreSysAttributes):
         self._check_access(request)
 
         _LOGGER.info("Home Assistant EventStream start")
-        async with self._api_client(request, 'stream', timeout=None) as client:
+        async with self._api_client(request, "stream", timeout=None) as client:
             response = web.StreamResponse()
             response.content_type = request.headers.get(CONTENT_TYPE)
             try:
@@ -97,13 +97,11 @@ class APIProxy(CoreSysAttributes):
         self._check_access(request)
 
         # Normal request
-        path = request.match_info.get('path', '')
+        path = request.match_info.get("path", "")
         async with self._api_client(request, path) as client:
             data = await client.read()
             return web.Response(
-                body=data,
-                status=client.status,
-                content_type=client.content_type
+                body=data, status=client.status, content_type=client.content_type
             )
 
     async def _websocket_client(self):
@@ -112,39 +110,46 @@ class APIProxy(CoreSysAttributes):
 
         try:
             client = await self.sys_websession_ssl.ws_connect(
-                url, heartbeat=30, verify_ssl=False)
+                url, heartbeat=30, verify_ssl=False
+            )
 
             # Handle authentication
             data = await client.receive_json()
 
-            if data.get('type') == 'auth_ok':
+            if data.get("type") == "auth_ok":
                 return client
 
-            if data.get('type') != 'auth_required':
+            if data.get("type") != "auth_required":
                 # Invalid protocol
-                _LOGGER.error(
-                    "Got unexpected response from HA WebSocket: %s", data)
+                _LOGGER.error("Got unexpected response from HA WebSocket: %s", data)
                 raise APIError()
 
             if self.sys_homeassistant.refresh_token:
                 await self.sys_homeassistant.ensure_access_token()
-                await client.send_json({
-                    'type': 'auth',
-                    'access_token': self.sys_homeassistant.access_token,
-                })
+                await client.send_json(
+                    {
+                        "type": "auth",
+                        "access_token": self.sys_homeassistant.access_token,
+                    }
+                )
             else:
-                await client.send_json({
-                    'type': 'auth',
-                    'api_password': self.sys_homeassistant.api_password,
-                })
+                await client.send_json(
+                    {
+                        "type": "auth",
+                        "api_password": self.sys_homeassistant.api_password,
+                    }
+                )
 
             data = await client.receive_json()
 
-            if data.get('type') == 'auth_ok':
+            if data.get("type") == "auth_ok":
                 return client
 
             # Renew the Token is invalid
-            if data.get('type') == 'invalid_auth' and self.sys_homeassistant.refresh_token:
+            if (
+                data.get("type") == "invalid_auth"
+                and self.sys_homeassistant.refresh_token
+            ):
                 self.sys_homeassistant.access_token = None
                 return await self._websocket_client()
 
@@ -167,30 +172,27 @@ class APIProxy(CoreSysAttributes):
 
         # handle authentication
         try:
-            await server.send_json({
-                'type': 'auth_required',
-                'ha_version': self.sys_homeassistant.version,
-            })
+            await server.send_json(
+                {"type": "auth_required", "ha_version": self.sys_homeassistant.version}
+            )
 
             # Check API access
             response = await server.receive_json()
-            hassio_token = response.get('api_password') or response.get('access_token')
+            hassio_token = response.get("api_password") or response.get("access_token")
             addon = self.sys_addons.from_token(hassio_token)
 
             if not addon or not addon.access_homeassistant_api:
                 _LOGGER.warning("Unauthorized WebSocket access!")
-                await server.send_json({
-                    'type': 'auth_invalid',
-                    'message': 'Invalid access',
-                })
+                await server.send_json(
+                    {"type": "auth_invalid", "message": "Invalid access"}
+                )
                 return server
 
             _LOGGER.info("WebSocket access from %s", addon.slug)
 
-            await server.send_json({
-                'type': 'auth_ok',
-                'ha_version': self.sys_homeassistant.version,
-            })
+            await server.send_json(
+                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version}
+            )
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
             return server
@@ -207,16 +209,13 @@ class APIProxy(CoreSysAttributes):
             server_read = None
             while not server.closed and not client.closed:
                 if not client_read:
-                    client_read = self.sys_create_task(
-                        client.receive_str())
+                    client_read = self.sys_create_task(client.receive_str())
                 if not server_read:
-                    server_read = self.sys_create_task(
-                        server.receive_str())
+                    server_read = self.sys_create_task(server.receive_str())
 
                 # wait until data need to be processed
                 await asyncio.wait(
-                    [client_read, server_read],
-                    return_when=asyncio.FIRST_COMPLETED
+                    [client_read, server_read], return_when=asyncio.FIRST_COMPLETED
                 )
 
                 # server
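The reformatted `_websocket_client()` above implements Home Assistant's WebSocket authentication handshake (`auth_required` → `auth` → `auth_ok`/`invalid_auth`). A minimal standalone sketch of the same exchange from the client side, using only the message types visible in the diff; the URL and token are placeholders:

```python
# Sketch of the WebSocket auth handshake the proxy performs, as an aiohttp client.
import asyncio

import aiohttp

HA_WS_URL = "http://homeassistant.local:8123/api/websocket"  # placeholder
ACCESS_TOKEN = "LONG_LIVED_TOKEN"  # placeholder


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(HA_WS_URL, heartbeat=30) as client:
            # The server opens with auth_required, as handled in _websocket_client()
            data = await client.receive_json()
            if data.get("type") == "auth_required":
                await client.send_json({"type": "auth", "access_token": ACCESS_TOKEN})
                data = await client.receive_json()

            if data.get("type") != "auth_ok":
                raise RuntimeError(f"Authentication failed: {data}")
            print("authenticated against HA", data.get("ha_version"))


asyncio.run(main())
```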
@@ -2,8 +2,13 @@
 
 from .utils import api_process, api_validate
 from ..const import (
-    ATTR_AVAILABLE, ATTR_PROVIDERS, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM,
-    PROVIDE_SERVICE)
+    ATTR_AVAILABLE,
+    ATTR_PROVIDERS,
+    ATTR_SLUG,
+    ATTR_SERVICES,
+    REQUEST_FROM,
+    PROVIDE_SERVICE,
+)
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, APIForbidden
 
@@ -13,7 +18,7 @@ class APIServices(CoreSysAttributes):
 
     def _extract_service(self, request):
         """Return service, throw an exception if it doesn't exist."""
-        service = self.sys_services.get(request.match_info.get('service'))
+        service = self.sys_services.get(request.match_info.get("service"))
         if not service:
             raise APIError("Service does not exist")
 
@@ -24,11 +29,13 @@ class APIServices(CoreSysAttributes):
         """Show register services."""
         services = []
         for service in self.sys_services.list_services:
-            services.append({
-                ATTR_SLUG: service.slug,
-                ATTR_AVAILABLE: service.enabled,
-                ATTR_PROVIDERS: service.providers,
-            })
+            services.append(
+                {
+                    ATTR_SLUG: service.slug,
+                    ATTR_AVAILABLE: service.enabled,
+                    ATTR_PROVIDERS: service.providers,
+                }
+            )
 
         return {ATTR_SERVICES: services}
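For reference, the payload produced by the reformatted `APIServices.list()` has the shape sketched below; only the keys are fixed by the `ATTR_*` constants in the diff, the values are made up:

```python
# Illustrative response body from the services list endpoint; values are examples.
services_response = {
    "services": [
        {"slug": "mqtt", "available": True, "providers": ["core_mosquitto"]},
    ]
}
```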
@@ -10,9 +10,21 @@ import voluptuous as vol
 from .utils import api_process, api_validate
 from ..snapshots.validate import ALL_FOLDERS
 from ..const import (
-    ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
-    ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
-    ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
+    ATTR_NAME,
+    ATTR_SLUG,
+    ATTR_DATE,
+    ATTR_ADDONS,
+    ATTR_REPOSITORIES,
+    ATTR_HOMEASSISTANT,
+    ATTR_VERSION,
+    ATTR_SIZE,
+    ATTR_FOLDERS,
+    ATTR_TYPE,
+    ATTR_SNAPSHOTS,
+    ATTR_PASSWORD,
+    ATTR_PROTECTED,
+    CONTENT_TYPE_TAR,
+)
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 
@@ -20,30 +32,32 @@ _LOGGER = logging.getLogger(__name__)
 
 
 # pylint: disable=no-value-for-parameter
-SCHEMA_RESTORE_PARTIAL = vol.Schema({
-    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
-    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
-    vol.Optional(ATTR_ADDONS):
-        vol.All([vol.Coerce(str)], vol.Unique()),
-    vol.Optional(ATTR_FOLDERS):
-        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
-})
+SCHEMA_RESTORE_PARTIAL = vol.Schema(
+    {
+        vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
+        vol.Optional(ATTR_ADDONS): vol.All([vol.Coerce(str)], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
+    }
+)
 
-SCHEMA_RESTORE_FULL = vol.Schema({
-    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
-})
+SCHEMA_RESTORE_FULL = vol.Schema(
+    {vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str))}
+)
 
-SCHEMA_SNAPSHOT_FULL = vol.Schema({
-    vol.Optional(ATTR_NAME): vol.Coerce(str),
-    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
-})
+SCHEMA_SNAPSHOT_FULL = vol.Schema(
+    {
+        vol.Optional(ATTR_NAME): vol.Coerce(str),
+        vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    }
+)
 
-SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
-    vol.Optional(ATTR_ADDONS):
-        vol.All([vol.Coerce(str)], vol.Unique()),
-    vol.Optional(ATTR_FOLDERS):
-        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
-})
+SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend(
+    {
+        vol.Optional(ATTR_ADDONS): vol.All([vol.Coerce(str)], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
+    }
+)
 
 
 class APISnapshots(CoreSysAttributes):
@@ -51,7 +65,7 @@ class APISnapshots(CoreSysAttributes):
 
     def _extract_snapshot(self, request):
         """Return snapshot, throw an exception if it doesn't exist."""
-        snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
+        snapshot = self.sys_snapshots.get(request.match_info.get("snapshot"))
         if not snapshot:
             raise APIError("Snapshot does not exist")
         return snapshot
@@ -61,17 +75,17 @@ class APISnapshots(CoreSysAttributes):
         """Return snapshot list."""
         data_snapshots = []
         for snapshot in self.sys_snapshots.list_snapshots:
-            data_snapshots.append({
-                ATTR_SLUG: snapshot.slug,
-                ATTR_NAME: snapshot.name,
-                ATTR_DATE: snapshot.date,
-                ATTR_TYPE: snapshot.sys_type,
-                ATTR_PROTECTED: snapshot.protected,
-            })
+            data_snapshots.append(
+                {
+                    ATTR_SLUG: snapshot.slug,
+                    ATTR_NAME: snapshot.name,
+                    ATTR_DATE: snapshot.date,
+                    ATTR_TYPE: snapshot.sys_type,
+                    ATTR_PROTECTED: snapshot.protected,
+                }
+            )
 
-        return {
-            ATTR_SNAPSHOTS: data_snapshots,
-        }
+        return {ATTR_SNAPSHOTS: data_snapshots}
 
     @api_process
     async def reload(self, request):
@@ -86,12 +100,14 @@ class APISnapshots(CoreSysAttributes):
 
         data_addons = []
         for addon_data in snapshot.addons:
-            data_addons.append({
-                ATTR_SLUG: addon_data[ATTR_SLUG],
-                ATTR_NAME: addon_data[ATTR_NAME],
-                ATTR_VERSION: addon_data[ATTR_VERSION],
-                ATTR_SIZE: addon_data[ATTR_SIZE],
-            })
+            data_addons.append(
+                {
+                    ATTR_SLUG: addon_data[ATTR_SLUG],
+                    ATTR_NAME: addon_data[ATTR_NAME],
+                    ATTR_VERSION: addon_data[ATTR_VERSION],
+                    ATTR_SIZE: addon_data[ATTR_SIZE],
+                }
+            )
 
         return {
             ATTR_SLUG: snapshot.slug,
@@ -110,8 +126,7 @@ class APISnapshots(CoreSysAttributes):
     async def snapshot_full(self, request):
         """Full-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
-        snapshot = await asyncio.shield(
-            self.sys_snapshots.do_snapshot_full(**body))
+        snapshot = await asyncio.shield(self.sys_snapshots.do_snapshot_full(**body))
 
         if snapshot:
             return {ATTR_SLUG: snapshot.slug}
@@ -121,8 +136,7 @@ class APISnapshots(CoreSysAttributes):
    async def snapshot_partial(self, request):
        """Partial-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
-        snapshot = await asyncio.shield(
-            self.sys_snapshots.do_snapshot_partial(**body))
+        snapshot = await asyncio.shield(self.sys_snapshots.do_snapshot_partial(**body))
 
         if snapshot:
             return {ATTR_SLUG: snapshot.slug}
@@ -135,7 +149,8 @@ class APISnapshots(CoreSysAttributes):
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
 
         return await asyncio.shield(
-            self.sys_snapshots.do_restore_full(snapshot, **body))
+            self.sys_snapshots.do_restore_full(snapshot, **body)
+        )
 
     @api_process
     async def restore_partial(self, request):
@@ -144,7 +159,8 @@ class APISnapshots(CoreSysAttributes):
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
 
         return await asyncio.shield(
-            self.sys_snapshots.do_restore_partial(snapshot, **body))
+            self.sys_snapshots.do_restore_partial(snapshot, **body)
+        )
 
     @api_process
     async def remove(self, request):
@@ -168,7 +184,7 @@ class APISnapshots(CoreSysAttributes):
             tar_file = Path(temp_dir, f"snapshot.tar")
 
             try:
-                with tar_file.open('wb') as snapshot:
+                with tar_file.open("wb") as snapshot:
                     async for data in request.content.iter_any():
                         snapshot.write(data)
 
@@ -180,7 +196,8 @@ class APISnapshots(CoreSysAttributes):
                 return False
 
             snapshot = await asyncio.shield(
-                self.sys_snapshots.import_snapshot(tar_file))
+                self.sys_snapshots.import_snapshot(tar_file)
+            )
 
             if snapshot:
                 return {ATTR_SLUG: snapshot.slug}
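The snapshot schemas above are plain voluptuous validators; `api_validate()` runs them against the request body and converts `vol.Invalid` into an API error. A minimal, self-contained sketch of the same validation, where `ALL_FOLDERS` is an illustrative stand-in for the real list from `snapshots.validate`:

```python
# Standalone sketch of how SCHEMA_SNAPSHOT_PARTIAL validates a request body.
import voluptuous as vol

ALL_FOLDERS = ["homeassistant", "share", "ssl"]  # illustrative values only

SCHEMA_SNAPSHOT_PARTIAL = vol.Schema(
    {
        vol.Optional("name"): vol.Coerce(str),
        vol.Optional("password"): vol.Any(None, vol.Coerce(str)),
        vol.Optional("addons"): vol.All([vol.Coerce(str)], vol.Unique()),
        vol.Optional("folders"): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
    }
)

# A valid body passes and is normalized:
body = SCHEMA_SNAPSHOT_PARTIAL({"name": "backup-1", "addons": ["core_ssh"]})

# An unknown folder raises vol.Invalid, which api_validate() turns into an APIError:
try:
    SCHEMA_SNAPSHOT_PARTIAL({"folders": ["not-a-folder"]})
except vol.Invalid as err:
    print(f"rejected: {err}")
```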
@@ -14,6 +14,8 @@ from ..const import (
     ATTR_BLK_WRITE,
     ATTR_CHANNEL,
     ATTR_CPU_PERCENT,
+    ATTR_DEBUG,
+    ATTR_DEBUG_BLOCK,
     ATTR_DESCRIPTON,
     ATTR_ICON,
     ATTR_INSTALLED,
@@ -43,6 +45,7 @@ from .utils import api_process, api_process_raw, api_validate
 
 _LOGGER = logging.getLogger(__name__)
 
+# pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_CHANNEL): CHANNELS,
@@ -50,6 +53,8 @@ SCHEMA_OPTIONS = vol.Schema(
         vol.Optional(ATTR_TIMEZONE): validate_timezone,
         vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
         vol.Optional(ATTR_LOGGING): LOG_LEVEL,
+        vol.Optional(ATTR_DEBUG): vol.Boolean(),
+        vol.Optional(ATTR_DEBUG_BLOCK): vol.Boolean(),
     }
 )
 
@@ -68,21 +73,20 @@ class APISupervisor(CoreSysAttributes):
     async def info(self, request: web.Request) -> Dict[str, Any]:
         """Return host information."""
         list_addons = []
-        for addon in self.sys_addons.list_addons:
-            if addon.is_installed:
-                list_addons.append(
-                    {
-                        ATTR_NAME: addon.name,
-                        ATTR_SLUG: addon.slug,
-                        ATTR_DESCRIPTON: addon.description,
-                        ATTR_STATE: await addon.state(),
-                        ATTR_VERSION: addon.latest_version,
-                        ATTR_INSTALLED: addon.version_installed,
-                        ATTR_REPOSITORY: addon.repository,
-                        ATTR_ICON: addon.with_icon,
-                        ATTR_LOGO: addon.with_logo,
-                    }
-                )
+        for addon in self.sys_addons.installed:
+            list_addons.append(
+                {
+                    ATTR_NAME: addon.name,
+                    ATTR_SLUG: addon.slug,
+                    ATTR_DESCRIPTON: addon.description,
+                    ATTR_STATE: await addon.state(),
+                    ATTR_VERSION: addon.latest_version,
+                    ATTR_INSTALLED: addon.version,
+                    ATTR_REPOSITORY: addon.repository,
+                    ATTR_ICON: addon.with_icon,
+                    ATTR_LOGO: addon.with_logo,
+                }
+            )
 
         return {
             ATTR_VERSION: HASSIO_VERSION,
@@ -111,12 +115,18 @@ class APISupervisor(CoreSysAttributes):
         if ATTR_WAIT_BOOT in body:
             self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
 
+        if ATTR_DEBUG in body:
+            self.sys_config.debug = body[ATTR_DEBUG]
+
+        if ATTR_DEBUG_BLOCK in body:
+            self.sys_config.debug_block = body[ATTR_DEBUG_BLOCK]
+
         if ATTR_LOGGING in body:
             self.sys_config.logging = body[ATTR_LOGGING]
 
         if ATTR_ADDONS_REPOSITORIES in body:
             new = set(body[ATTR_ADDONS_REPOSITORIES])
-            await asyncio.shield(self.sys_addons.load_repositories(new))
+            await asyncio.shield(self.sys_store.update_repositories(new))
 
         self.sys_updater.save_data()
         self.sys_config.save_data()
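The two new options are plain booleans gated by `SCHEMA_OPTIONS`. A sketch of a Supervisor options call that toggles them, assuming the add-on-internal `http://hassio` endpoint and `X-HASSIO-KEY` header of this era; the token value and the use of `requests` are illustrative:

```python
# Illustrative options call exercising the new debug flags.
import requests

SUPERVISOR_OPTIONS = "http://hassio/supervisor/options"  # assumed internal endpoint
TOKEN = "SUPERVISOR_TOKEN"  # placeholder

body = {
    "channel": "dev",
    "logging": "debug",
    "debug": True,         # enable the ptvsd debugger on next start
    "debug_block": False,  # do not block startup waiting for a debugger
}

resp = requests.post(SUPERVISOR_OPTIONS, json=body, headers={"X-HASSIO-KEY": TOKEN})
resp.raise_for_status()
```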
@@ -7,8 +7,13 @@
 from voluptuous.humanize import humanize_error
 
 from ..const import (
-    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
-    CONTENT_TYPE_BINARY)
+    JSON_RESULT,
+    JSON_DATA,
+    JSON_MESSAGE,
+    RESULT_OK,
+    RESULT_ERROR,
+    CONTENT_TYPE_BINARY,
+)
 from ..exceptions import HassioError, APIError, APIForbidden
 
 _LOGGER = logging.getLogger(__name__)
@@ -26,6 +31,7 @@ def json_loads(data):
 
 def api_process(method):
     """Wrap function with true/false calls to rest api."""
+
     async def wrap_api(api, *args, **kwargs):
         """Return API information."""
         try:
@@ -48,8 +54,10 @@ def api_process(method):
 
 def api_process_raw(content):
     """Wrap content_type into function."""
+
     def wrap_method(method):
         """Wrap function with raw output to rest api."""
+
         async def wrap_api(api, *args, **kwargs):
             """Return api information."""
             try:
@@ -59,29 +67,26 @@ def api_process_raw(content):
                 msg_data = str(err).encode()
                 msg_type = CONTENT_TYPE_BINARY
             except HassioError:
-                msg_data = b''
+                msg_data = b""
                 msg_type = CONTENT_TYPE_BINARY
 
             return web.Response(body=msg_data, content_type=msg_type)
 
         return wrap_api
 
     return wrap_method
 
 
 def api_return_error(message=None):
     """Return an API error message."""
-    return web.json_response({
-        JSON_RESULT: RESULT_ERROR,
-        JSON_MESSAGE: message,
-    }, status=400)
+    return web.json_response(
+        {JSON_RESULT: RESULT_ERROR, JSON_MESSAGE: message}, status=400
+    )
 
 
 def api_return_ok(data=None):
     """Return an API ok answer."""
-    return web.json_response({
-        JSON_RESULT: RESULT_OK,
-        JSON_DATA: data or {},
-    })
+    return web.json_response({JSON_RESULT: RESULT_OK, JSON_DATA: data or {}})
 
 
 async def api_validate(schema, request):
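The decorators above all follow one pattern: wrap an async aiohttp handler and translate its result or exception into a JSON envelope. A condensed standalone sketch of that pattern, with simplified names and a generic exception clause where the real code catches `HassioError` subclasses:

```python
# Simplified sketch of the api_process decorator pattern.
from aiohttp import web


def api_process(method):
    """Wrap an aiohttp handler with ok/error JSON envelopes."""

    async def wrap_api(api, *args, **kwargs):
        try:
            answer = await method(api, *args, **kwargs)
        except Exception as err:  # the real code catches HassioError subclasses
            return web.json_response(
                {"result": "error", "message": str(err)}, status=400
            )
        if isinstance(answer, web.Response):
            return answer  # raw responses pass through untouched
        return web.json_response({"result": "ok", "data": answer or {}})

    return wrap_api
```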
@@ -1,14 +1,24 @@
 """Handle Arch for underlay maschine/platforms."""
 import logging
-from typing import List
 from pathlib import Path
+import platform
+from typing import List
 
-from .coresys import CoreSysAttributes, CoreSys
+from .coresys import CoreSys, CoreSysAttributes
 from .exceptions import HassioArchNotFound, JsonFileError
 from .utils.json import read_json_file
 
 _LOGGER = logging.getLogger(__name__)
 
+MAP_CPU = {
+    "armv7": "armv7",
+    "armv6": "armhf",
+    "armv8": "aarch64",
+    "aarch64": "aarch64",
+    "i686": "i386",
+    "x86_64": "amd64",
+}
+
 
 class CpuArch(CoreSysAttributes):
     """Manage available architectures."""
@@ -42,10 +52,12 @@ class CpuArch(CoreSysAttributes):
             _LOGGER.warning("Can't read arch json")
             return
 
+        native_support = self.detect_cpu()
+
         # Evaluate current CPU/Platform
         if not self.sys_machine or self.sys_machine not in arch_data:
             _LOGGER.warning("Can't detect underlay machine type!")
-            self._default_arch = self.sys_supervisor.arch
+            self._default_arch = native_support
             self._supported_arch.append(self.default)
             return
 
@@ -53,6 +65,10 @@ class CpuArch(CoreSysAttributes):
         self._supported_arch.extend(arch_data[self.sys_machine])
         self._default_arch = self.supported[0]
 
+        # Make sure native support is in supported list
+        if native_support not in self._supported_arch:
+            self._supported_arch.append(native_support)
+
     def is_supported(self, arch_list: List[str]) -> bool:
         """Return True if there is a supported arch by this platform."""
         return not set(self.supported).isdisjoint(set(arch_list))
@@ -63,3 +79,11 @@ class CpuArch(CoreSysAttributes):
             if self_arch in arch_list:
                 return self_arch
         raise HassioArchNotFound()
+
+    def detect_cpu(self) -> str:
+        """Return the arch type of local CPU."""
+        cpu = platform.machine()
+        for check, value in MAP_CPU.items():
+            if cpu.startswith(check):
+                return value
+        return self.sys_supervisor.arch
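The new `detect_cpu()` translates the kernel's machine string into a Hass.io architecture name via the `MAP_CPU` prefix table added above. A standalone version of the same logic, with an illustrative fallback where the real code falls back to the Supervisor's own arch:

```python
# Standalone version of the detect_cpu() mapping.
import platform

MAP_CPU = {
    "armv7": "armv7",
    "armv6": "armhf",
    "armv8": "aarch64",
    "aarch64": "aarch64",
    "i686": "i386",
    "x86_64": "amd64",
}


def detect_cpu(fallback: str = "amd64") -> str:
    """Return the Hass.io arch name for the local CPU."""
    cpu = platform.machine()
    for check, value in MAP_CPU.items():
        if cpu.startswith(check):
            return value
    return fallback  # the real code uses the Supervisor's own arch here


print(detect_cpu())  # e.g. "amd64" on a typical x86_64 host
```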
@@ -2,8 +2,7 @@
 import logging
 import hashlib
 
-from .const import (
-    FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
+from .const import FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON
 from .coresys import CoreSysAttributes
 from .utils.json import JsonConfig
 from .validate import SCHEMA_AUTH_CONFIG
@@ -68,11 +67,14 @@ class Auth(JsonConfig, CoreSysAttributes):
 
         try:
             async with self.sys_homeassistant.make_request(
-                    'post', 'api/hassio_auth', json={
-                        ATTR_USERNAME: username,
-                        ATTR_PASSWORD: password,
-                        ATTR_ADDON: addon.slug,
-                    }) as req:
+                "post",
+                "api/hassio_auth",
+                json={
+                    ATTR_USERNAME: username,
+                    ATTR_PASSWORD: password,
+                    ATTR_ADDON: addon.slug,
+                },
+            ) as req:
 
                 if req.status == 200:
                     _LOGGER.info("Success login from %s", username)
@@ -23,6 +23,7 @@ from .ingress import Ingress
 from .services import ServiceManager
 from .snapshots import SnapshotManager
 from .supervisor import Supervisor
+from .store import StoreManager
 from .tasks import Tasks
 from .updater import Updater
 
@@ -53,6 +54,7 @@ async def initialize_coresys():
     coresys.ingress = Ingress(coresys)
     coresys.tasks = Tasks(coresys)
     coresys.services = ServiceManager(coresys)
+    coresys.store = StoreManager(coresys)
     coresys.discovery = Discovery(coresys)
     coresys.dbus = DBusManager(coresys)
     coresys.hassos = HassOS(coresys)
@@ -67,7 +69,7 @@ async def initialize_coresys():
     return coresys
 
 
-def initialize_system_data(coresys):
+def initialize_system_data(coresys: CoreSys):
     """Set up the default configuration and create folders."""
     config = coresys.config
 
@@ -124,7 +126,7 @@ def initialize_system_data(coresys):
     coresys.config.modify_log_level()
 
 
-def migrate_system_env(coresys):
+def migrate_system_env(coresys: CoreSys):
     """Cleanup some stuff after update."""
     config = coresys.config
 
@@ -207,3 +209,16 @@ def reg_signal(loop):
         loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGINT")
+
+
+def supervisor_debugger(coresys: CoreSys) -> None:
+    """Setup debugger if needed."""
+    if not coresys.config.debug or not coresys.dev:
+        return
+    import ptvsd
+
+    _LOGGER.info("Initialize Hass.io debugger")
+
+    ptvsd.enable_attach(address=("0.0.0.0", 33333), redirect_output=True)
+    if coresys.config.debug_block:
+        ptvsd.wait_for_attach()
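The new `supervisor_debugger()` gates ptvsd behind the debug flags: the module is imported lazily so production runs never load it, and `debug_block` decides whether startup blocks until a client attaches. A minimal standalone sketch of the same gate, using only the two ptvsd calls that appear in the diff:

```python
# Assumed-equivalent standalone debug gate around ptvsd.
import logging

_LOGGER = logging.getLogger(__name__)


def start_debugger(debug: bool, debug_block: bool) -> None:
    """Start ptvsd when requested, optionally blocking until a client attaches."""
    if not debug:
        return
    import ptvsd  # imported lazily, as in the diff, so normal runs skip it

    _LOGGER.info("Initialize debugger")
    ptvsd.enable_attach(address=("0.0.0.0", 33333), redirect_output=True)
    if debug_block:
        ptvsd.wait_for_attach()
```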
@@ -2,19 +2,21 @@
 from datetime import datetime
 import logging
 import os
-import re
 from pathlib import Path, PurePath
+import re
 
 import pytz
 
 from .const import (
+    ATTR_ADDONS_CUSTOM_LIST,
+    ATTR_DEBUG,
+    ATTR_DEBUG_BLOCK,
+    ATTR_LAST_BOOT,
+    ATTR_LOGGING,
+    ATTR_TIMEZONE,
+    ATTR_WAIT_BOOT,
     FILE_HASSIO_CONFIG,
     HASSIO_DATA,
-    ATTR_TIMEZONE,
-    ATTR_ADDONS_CUSTOM_LIST,
-    ATTR_LAST_BOOT,
-    ATTR_WAIT_BOOT,
-    ATTR_LOGGING,
 )
 from .utils.dt import parse_datetime
 from .utils.json import JsonConfig
@@ -82,6 +84,26 @@ class CoreConfig(JsonConfig):
         """Set wait boot time."""
         self._data[ATTR_WAIT_BOOT] = value
 
+    @property
+    def debug(self) -> bool:
+        """Return True if ptvsd is enabled."""
+        return self._data[ATTR_DEBUG]
+
+    @debug.setter
+    def debug(self, value: bool):
+        """Set debug mode."""
+        self._data[ATTR_DEBUG] = value
+
+    @property
+    def debug_block(self) -> bool:
+        """Return True if ptvsd should waiting."""
+        return self._data[ATTR_DEBUG_BLOCK]
+
+    @debug_block.setter
+    def debug_block(self, value: bool):
+        """Set debug wait mode."""
+        self._data[ATTR_DEBUG_BLOCK] = value
+
     @property
     def logging(self) -> str:
         """Return log level of system."""
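The new `debug`/`debug_block` accessors follow the file's standard pattern: properties backed by the JSON config dict, so setting them only mutates `_data` until `save_data()` persists it. A minimal standalone sketch of that pattern with simplified names:

```python
# Simplified sketch of a dict-backed config property, as used above.
class Config:
    def __init__(self):
        # defaults mirror what a fresh config would contain
        self._data = {"debug": False, "debug_block": False}

    @property
    def debug(self) -> bool:
        """Return True if the debugger should be enabled."""
        return self._data["debug"]

    @debug.setter
    def debug(self, value: bool):
        """Set debug mode (persisted later by save_data())."""
        self._data["debug"] = value


config = Config()
config.debug = True
assert config.debug is True
```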
@@ -3,7 +3,7 @@ from pathlib import Path
 from ipaddress import ip_network
 
 
-HASSIO_VERSION = "159"
+HASSIO_VERSION = "166"
 
 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
 URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
@@ -198,8 +198,18 @@ ATTR_INGRESS_PORT = "ingress_port"
 ATTR_INGRESS_ENTRY = "ingress_entry"
 ATTR_INGRESS_TOKEN = "ingress_token"
 ATTR_INGRESS_URL = "ingress_url"
+ATTR_INGRESS_PANEL = "ingress_panel"
+ATTR_PANEL_ICON = "panel_icon"
+ATTR_PANEL_TITLE = "panel_title"
+ATTR_PANEL_ADMIN = "panel_admin"
+ATTR_TITLE = "title"
+ATTR_ENABLE = "enable"
 ATTR_IP_ADDRESS = "ip_address"
 ATTR_SESSION = "session"
 ATTR_ADMIN = "admin"
 ATTR_PANELS = "panels"
+ATTR_DEBUG = "debug"
+ATTR_DEBUG_BLOCK = "debug_block"
 
 PROVIDE_SERVICE = "provide"
 NEED_SERVICE = "need"
@@ -44,6 +44,9 @@ class HassIO(CoreSysAttributes):
         # Load HassOS
         await self.sys_hassos.load()
 
+        # Load Stores
+        await self.sys_store.load()
+
         # Load Add-ons
         await self.sys_addons.load()
 
@@ -27,6 +27,7 @@ if TYPE_CHECKING:
     from .services import ServiceManager
     from .snapshots import SnapshotManager
     from .supervisor import Supervisor
+    from .store import StoreManager
     from .tasks import Tasks
     from .updater import Updater
 
@@ -43,7 +44,8 @@ class CoreSys:
         self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
         self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
         self._websession_ssl: aiohttp.ClientSession = aiohttp.ClientSession(
-            connector=aiohttp.TCPConnector(ssl=False))
+            connector=aiohttp.TCPConnector(ssl=False)
+        )
 
         # Global objects
         self._config: CoreConfig = CoreConfig()
@@ -68,6 +70,7 @@ class CoreSys:
         self._dbus: DBusManager = None
         self._hassos: HassOS = None
         self._services: ServiceManager = None
+        self._store: StoreManager = None
         self._discovery: Discovery = None
 
     @property
@@ -78,7 +81,7 @@ class CoreSys:
         return None
 
     @property
-    def dev(self) -> str:
+    def dev(self) -> bool:
        """Return True if we run dev mode."""
        return self._updater.channel == CHANNEL_DEV
 
@@ -223,6 +226,18 @@ class CoreSys:
             raise RuntimeError("AddonManager already set!")
         self._addons = value
 
+    @property
+    def store(self) -> StoreManager:
+        """Return StoreManager object."""
+        return self._store
+
+    @store.setter
+    def store(self, value: StoreManager):
+        """Set a StoreManager object."""
+        if self._store:
+            raise RuntimeError("StoreManager already set!")
+        self._store = value
+
     @property
     def snapshots(self) -> SnapshotManager:
         """Return SnapshotManager object."""
@@ -425,6 +440,11 @@ class CoreSysAttributes:
         """Return AddonManager object."""
         return self.coresys.addons
 
+    @property
+    def sys_store(self) -> StoreManager:
+        """Return StoreManager object."""
+        return self.coresys.store
+
     @property
     def sys_snapshots(self) -> SnapshotManager:
         """Return SnapshotManager object."""
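The `StoreManager` wiring above follows CoreSys's set-once property convention: each manager slot starts as `None`, and the setter refuses to overwrite an already-assigned instance. A minimal standalone sketch of that convention:

```python
# Simplified sketch of the set-once manager property used by CoreSys.
class CoreSys:
    def __init__(self):
        self._store = None

    @property
    def store(self):
        return self._store

    @store.setter
    def store(self, value):
        if self._store:
            raise RuntimeError("StoreManager already set!")
        self._store = value


core = CoreSys()
core.store = object()      # first assignment wins
try:
    core.store = object()  # second assignment is rejected
except RuntimeError as err:
    print(err)
```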
@@ -8,8 +8,8 @@ from ..utils.gdbus import DBus
 
 _LOGGER = logging.getLogger(__name__)
 
-DBUS_NAME = 'org.freedesktop.hostname1'
-DBUS_OBJECT = '/org/freedesktop/hostname1'
+DBUS_NAME = "org.freedesktop.hostname1"
+DBUS_OBJECT = "/org/freedesktop/hostname1"
 
 
 class Hostname(DBusInterface):
@@ -8,8 +8,8 @@ from ..utils.gdbus import DBus
 
 _LOGGER = logging.getLogger(__name__)
 
-DBUS_NAME = 'de.pengutronix.rauc'
-DBUS_OBJECT = '/'
+DBUS_NAME = "de.pengutronix.rauc"
+DBUS_OBJECT = "/"
 
 
 class Rauc(DBusInterface):
Some files were not shown because too many files have changed in this diff.