Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-09-09 21:19:26 +00:00)

Compare commits: 93 commits
| SHA1 |
|---|
| 4c80727bcc |
| b2c3157361 |
| dc4f38ebd0 |
| 7c9437c6ee |
| 9ce9e10dfd |
| 4e94043bca |
| 749d45bf13 |
| ce99b3e259 |
| 2c84daefab |
| dc1933fa88 |
| 6970cebf80 |
| a234006de2 |
| 2484149323 |
| 778148424c |
| 55f4a2395e |
| 5a45d47ed8 |
| da601d1483 |
| e98a1272e9 |
| 90e9cf788b |
| ec387c3010 |
| 7e5a960c98 |
| f1bcbf2416 |
| bce144e197 |
| 86a3735d83 |
| decf254e5f |
| e10fe16f21 |
| 996891a740 |
| 7385d026ea |
| 09f43d6f3c |
| 6906e757dd |
| 963d242afa |
| 3ed7cbe2ed |
| 0da924f10b |
| 76411da0a7 |
| ce87a72cf0 |
| f8c9e2f295 |
| 00af027e51 |
| c91fce3281 |
| fb6df18ce9 |
| 31f5c6f938 |
| d3a44b2992 |
| b537a03e6d |
| 46093379e4 |
| 1b17d90504 |
| 7d42dd7ac2 |
| f35dcfcfd3 |
| c4f223c38a |
| 71362f2c76 |
| 96beac9fd9 |
| 608c0e5076 |
| 16ef6d82d2 |
| 51940222be |
| 21f3c4820b |
| 214c6f919e |
| d9d438d571 |
| cf60d1f55c |
| f9aa12cbad |
| 76266cc18b |
| 50b9506ff3 |
| 754cd64213 |
| 113b62ee77 |
| d9874c4c3e |
| ca44e858c5 |
| c7ca4de307 |
| b77146a4e0 |
| 45b4800378 |
| 7f9232d2b9 |
| d90426f745 |
| c2deabb672 |
| ead5993f3e |
| 1bcd74e8fa |
| 118da3c275 |
| d7bb9013d4 |
| 812c46d82b |
| c0462b28cd |
| 82b2f66920 |
| 01da42e1b6 |
| d652d22547 |
| baea84abe6 |
| c2d705a42a |
| f10b433e1f |
| 67f562a846 |
| 1edec61133 |
| c13a33bf71 |
| 2ae93ae7b1 |
| 8451020afe |
| a48e568efc |
| dee2808cb5 |
| 06a2ab26a2 |
| 45de0f2f39 |
| bac5f704dc |
| 79669a5d04 |
| a6e712c9ea |
.github/main.workflow (vendored): deleted file (16 lines)

```hcl
workflow "tox" {
  on = "push"
  resolves = [
    "Python 3.7",
    "Json Files",
  ]
}

action "Python 3.7" {
  uses = "home-assistant/actions/py37-tox@master"
}

action "Json Files" {
  uses = "home-assistant/actions/jq@master"
  args = "**/*.json"
}
```
.github/stale.yml (vendored): new file (17 lines)

```yaml
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
```
.hadolint.yaml: new file (5 lines)

```yaml
ignored:
  - DL3018
  - DL3006
  - DL3013
  - SC2155
```
API.md (58 changed lines)

````diff
@@ -41,7 +41,11 @@ The addons from `addons` are only installed one.
     "arch": "armhf|aarch64|i386|amd64",
     "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
+    "logging": "debug|info|warning|error|critical",
+    "ip_address": "ip address",
     "wait_boot": "int",
+    "debug": "bool",
+    "debug_block": "bool",
     "addons": [
         {
             "name": "xy bla",
@@ -78,6 +82,9 @@ Optional:
     "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
+    "debug": "bool",
+    "debug_block": "bool",
+    "logging": "debug|info|warning|error|critical",
     "addons_repositories": [
         "REPO_URL"
     ]
@@ -348,6 +355,7 @@ Load host configs from a USB stick.
     "last_version": "LAST_VERSION",
     "arch": "arch",
     "machine": "Image machine type",
+    "ip_address": "ip address",
     "image": "str",
     "custom": "bool -> if custom image",
     "boot": "bool",
@@ -469,6 +477,7 @@ Get all available addons.
     "available": "bool",
     "arch": ["armhf", "aarch64", "i386", "amd64"],
     "machine": "[raspberrypi2, tinker]",
+    "homeassistant": "null|min Home Assistant version",
     "repository": "12345678|null",
     "version": "null|VERSION_INSTALLED",
     "last_version": "LAST_VERSION",
@@ -477,6 +486,7 @@ Get all available addons.
     "build": "bool",
     "options": "{}",
     "network": "{}|null",
+    "network_description": "{}|null",
     "host_network": "bool",
     "host_pid": "bool",
     "host_ipc": "bool",
@@ -505,7 +515,13 @@ Get all available addons.
     "audio_input": "null|0,0",
     "audio_output": "null|0,0",
     "services_role": "['service:access']",
-    "discovery": "['service']"
+    "discovery": "['service']",
+    "ip_address": "ip address",
+    "ingress": "bool",
+    "ingress_entry": "null|/api/hassio_ingress/slug",
+    "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
+    "ingress_port": "null|int",
+    "ingress_panel": "null|bool"
 }
 ```

@@ -526,7 +542,8 @@ Get all available addons.
     },
     "options": {},
     "audio_output": "null|0,0",
-    "audio_input": "null|0,0"
+    "audio_input": "null|0,0",
+    "ingress_panel": "bool"
 }
 ```

@@ -579,6 +596,40 @@ Write data to add-on stdin
 }
 ```

+### ingress
+
+- POST `/ingress/session`
+
+Create a new Session for access to ingress service.
+
+```json
+{
+    "session": "token"
+}
+```
+
+- GET `/ingress/panels`
+
+Return a list of enabled panels.
+
+```json
+{
+    "panels": {
+        "addon_slug": {
+            "enable": "boolean",
+            "icon": "mdi:...",
+            "title": "title",
+            "admin": "boolean"
+        }
+    }
+}
+```
+
+- VIEW `/ingress/{token}`
+
+Ingress WebUI for this Add-on. The addon need support HASS Auth!
+Need ingress session as cookie.
+
 ### discovery

 - GET `/discovery`
@@ -678,7 +729,8 @@ return:
     "machine": "type",
     "arch": "arch",
     "supported_arch": ["arch1", "arch2"],
-    "channel": "stable|beta|dev"
+    "channel": "stable|beta|dev",
+    "logging": "debug|info|warning|error|critical"
 }
 ```
````
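The new ingress endpoints above lend themselves to a quick smoke test. Below is a minimal sketch in Python, assuming the Supervisor API is reachable at `http://hassio` with a bearer token and that the session is passed back as an `ingress_session` cookie; both of those, and the placeholder names, are assumptions rather than anything confirmed by this diff.

```python
# Hypothetical walk through the ingress flow documented above.
# SUPERVISOR, TOKEN, and ADDON_TOKEN are placeholders; only the
# endpoint paths come from the API.md changes in this changeset.
import requests

SUPERVISOR = "http://hassio"      # assumed API origin
TOKEN = "SUPERVISOR_TOKEN"        # placeholder bearer token
HEADERS = {"Authorization": f"Bearer {TOKEN}"}

# POST /ingress/session -> {"session": "token"}
# (the live API may wrap this payload in a result envelope; this
# follows the shape shown in the diff)
resp = requests.post(f"{SUPERVISOR}/ingress/session", headers=HEADERS)
resp.raise_for_status()
session = resp.json()["session"]

# GET /ingress/panels -> {"panels": {"addon_slug": {...}}}
panels = requests.get(f"{SUPERVISOR}/ingress/panels", headers=HEADERS).json()["panels"]
for slug, panel in panels.items():
    print(slug, panel["enable"], panel["title"])

# VIEW /ingress/{token} expects the session as a cookie
requests.get(
    f"{SUPERVISOR}/ingress/ADDON_TOKEN/",
    headers=HEADERS,
    cookies={"ingress_session": session},  # cookie name is an assumption
)
```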
Dockerfile (17 changed lines)

```diff
@@ -1,6 +1,8 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM

+ARG BUILD_ARCH
+
 # Install base
 RUN apk add --no-cache \
     openssl \
@@ -14,20 +16,13 @@ RUN apk add --no-cache \

 # Install requirements
 COPY requirements.txt /usr/src/
-RUN apk add --no-cache --virtual .build-dependencies \
-        make \
-        g++ \
-        openssl-dev \
-        libffi-dev \
-        musl-dev \
-    && export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
-    && apk del .build-dependencies \
+RUN export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --find-links https://wheels.hass.io/alpine-3.9/${BUILD_ARCH}/ \
+        -r /usr/src/requirements.txt \
    && rm -f /usr/src/requirements.txt

 # Install HassIO
 COPY . /usr/src/hassio
-RUN pip3 install --no-cache-dir /usr/src/hassio \
-    && rm -rf /usr/src/hassio
+RUN pip3 install --no-cache-dir -e /usr/src/hassio

 CMD [ "python3", "-m", "hassio" ]
```
```diff
@@ -1,3 +1,5 @@
+[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
+
 # Hass.io

 ## First private cloud solution for home automation
@@ -18,9 +20,9 @@ Installation instructions can be found at <https://home-assistant.io/hassio>.

 The development of the supervisor is a bit tricky. Not difficult but tricky.

-- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
+- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
 - Go into a HassOS device or VM and pull your supervisor.
-- Set the developer modus on updater.json
+- Set the developer modus with cli `hassio supervisor options --channel=dev`
 - Tag it as `homeassistant/xy-hassio-supervisor:latest`
 - Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
 - Test your changes
```
azure-pipelines.yml: new file (156 lines; indentation reconstructed, since the scrape flattened the YAML)

```yaml
# https://dev.azure.com/home-assistant

trigger:
  branches:
    include:
      - master
      - dev
  tags:
    include:
      - '*'
    exclude:
      - untagged*
pr:
  - dev

variables:
  - name: versionHadolint
    value: 'v1.16.3'
  - name: versionBuilder
    value: '3.2'
  - name: versionWheels
    value: '0.3'
  - group: docker
  - group: wheels

jobs:

- job: 'Tox'
  pool:
    vmImage: 'ubuntu-16.04'
  steps:
  - task: UsePythonVersion@0
    displayName: 'Use Python $(python.version)'
    inputs:
      versionSpec: '3.7'
  - script: pip install tox
    displayName: 'Install Tox'
  - script: tox
    displayName: 'Run Tox'


- job: 'JQ'
  pool:
    vmImage: 'ubuntu-16.04'
  steps:
  - script: sudo apt-get install -y jq
    displayName: 'Install JQ'
  - bash: |
      shopt -s globstar
      cat **/*.json | jq '.'
    displayName: 'Run JQ'


- job: 'Hadolint'
  pool:
    vmImage: 'ubuntu-16.04'
  steps:
  - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
    displayName: 'Install Hadolint'
  - script: |
      sudo docker run --rm -i \
        -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
        hadolint/hadolint:$(versionHadolint) < Dockerfile
    displayName: 'Run Hadolint'


- job: 'Wheels'
  condition: eq(variables['Build.SourceBranchName'], 'dev')
  timeoutInMinutes: 360
  pool:
    vmImage: 'ubuntu-16.04'
  strategy:
    maxParallel: 3
    matrix:
      amd64:
        buildArch: 'amd64'
      i386:
        buildArch: 'i386'
      armhf:
        buildArch: 'armhf'
      armv7:
        buildArch: 'armv7'
      aarch64:
        buildArch: 'aarch64'
  steps:
  - script: |
      sudo apt-get install -y --no-install-recommends \
        qemu-user-static \
        binfmt-support

      sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
      sudo update-binfmts --enable qemu-arm
      sudo update-binfmts --enable qemu-aarch64
    displayName: 'Initial cross build'
  - script: |
      mkdir -p .ssh
      echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
      ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
      chmod 600 .ssh/*
    displayName: 'Install ssh key'
  - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
    displayName: 'Install wheels builder'
  - script: |
      sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
        homeassistant/$(buildArch)-wheels:$(versionWheels) \
        --apk "build-base;libffi-dev;openssl-dev" \
        --index https://wheels.hass.io \
        --requirement requirements.txt \
        --upload rsync \
        --remote wheels@$(wheelsHost):/opt/wheels
    displayName: 'Run wheels build'


- job: 'ReleaseDEV'
  condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('Wheels'))
  dependsOn:
  - 'JQ'
  - 'Tox'
  - 'Hadolint'
  - 'Wheels'
  pool:
    vmImage: 'ubuntu-16.04'
  steps:
  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
    displayName: 'Docker hub login'
  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
    displayName: 'Install Builder'
  - script: |
      sudo docker run --rm --privileged \
        -v ~/.docker:/root/.docker \
        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
        homeassistant/amd64-builder:$(versionBuilder) \
        --supervisor --all -t /data --version dev --docker-hub homeassistant
    displayName: 'Build DEV'


- job: 'Release'
  condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'))
  dependsOn:
  - 'JQ'
  - 'Tox'
  - 'Hadolint'
  pool:
    vmImage: 'ubuntu-16.04'
  steps:
  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
    displayName: 'Docker hub login'
  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
    displayName: 'Install Builder'
  - script: |
      sudo docker run --rm --privileged \
        -v ~/.docker:/root/.docker \
        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
        homeassistant/amd64-builder:$(versionBuilder) \
        --supervisor --all -t /data --docker-hub homeassistant
    displayName: 'Build Release'
```
```diff
@@ -13,7 +13,8 @@ def initialize_event_loop():
     """Attempt to use uvloop."""
     try:
         import uvloop
-        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+
+        uvloop.install()
     except ImportError:
         pass

@@ -39,6 +40,7 @@ if __name__ == "__main__":
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())

     bootstrap.migrate_system_env(coresys)
+    bootstrap.supervisor_debugger(coresys)

     _LOGGER.info("Setup HassIO")
     loop.run_until_complete(coresys.core.setup())
```
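The first hunk swaps the manual event-loop-policy call for `uvloop.install()`, which performs the same policy setup internally. A minimal standalone sketch of that pattern:

```python
# Minimal sketch of the pattern adopted above: prefer uvloop when it
# is importable, fall back to the stock asyncio loop otherwise.
import asyncio

try:
    import uvloop

    uvloop.install()  # sets uvloop's event loop policy globally
except ImportError:
    pass  # the stdlib event loop is used instead


async def main():
    await asyncio.sleep(0)


loop = asyncio.get_event_loop()
loop.run_until_complete(main())
```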
```diff
@@ -1,158 +1,251 @@
 """Init file for Hass.io add-ons."""
 import asyncio
+from contextlib import suppress
 import logging
+import tarfile
+from typing import Dict, List, Optional, Union

+from ..const import BOOT_AUTO, STATE_STARTED
+from ..coresys import CoreSys, CoreSysAttributes
+from ..exceptions import (
+    AddonsError,
+    AddonsNotSupportedError,
+    DockerAPIError,
+    HostAppArmorError,
+)
+from ..store.addon import AddonStore
 from .addon import Addon
-from .repository import Repository
 from .data import AddonsData
-from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
-from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

-BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
+AnyAddon = Union[Addon, AddonStore]


 class AddonManager(CoreSysAttributes):
     """Manage add-ons inside Hass.io."""

-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys):
         """Initialize Docker base wrapper."""
-        self.coresys = coresys
-        self.data = AddonsData(coresys)
-        self.addons_obj = {}
-        self.repositories_obj = {}
+        self.coresys: CoreSys = coresys
+        self.data: AddonsData = AddonsData(coresys)
+        self.local: Dict[str, Addon] = {}
+        self.store: Dict[str, AddonStore] = {}

     @property
-    def list_addons(self):
+    def all(self) -> List[AnyAddon]:
         """Return a list of all add-ons."""
-        return list(self.addons_obj.values())
+        addons = {**self.store, **self.local}
+        return list(addons.values())

     @property
-    def list_installed(self):
-        """Return a list of installed add-ons."""
-        return [addon for addon in self.addons_obj.values()
-                if addon.is_installed]
-
-    @property
-    def list_repositories(self):
-        """Return list of add-on repositories."""
-        return list(self.repositories_obj.values())
-
-    def get(self, addon_slug):
-        """Return an add-on from slug."""
-        return self.addons_obj.get(addon_slug)
+    def installed(self) -> List[Addon]:
+        """Return a list of all installed add-ons."""
+        return list(self.local.values())
+
+    def get(self, addon_slug: str) -> Optional[AnyAddon]:
+        """Return an add-on from slug.
+
+        Prio:
+          1 - Local
+          2 - Store
+        """
+        if addon_slug in self.local:
+            return self.local[addon_slug]
+        return self.store.get(addon_slug)

-    def from_token(self, token):
+    def from_token(self, token: str) -> Optional[Addon]:
         """Return an add-on from Hass.io token."""
-        for addon in self.list_addons:
-            if addon.is_installed and token == addon.hassio_token:
+        for addon in self.installed:
+            if token == addon.hassio_token:
                 return addon
         return None

-    async def load(self):
+    async def load(self) -> None:
         """Start up add-on management."""
-        self.data.reload()
-
-        # Init Hass.io built-in repositories
-        repositories = \
-            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
-
-        # Init custom repositories and load add-ons
-        await self.load_repositories(repositories)
-
-    async def reload(self):
-        """Update add-ons from repository and reload list."""
-        tasks = [repository.update() for repository in
-                 self.repositories_obj.values()]
-        if tasks:
-            await asyncio.wait(tasks)
-
-        # read data from repositories
-        self.data.reload()
-
-        # update addons
-        await self.load_addons()
-
-    async def load_repositories(self, list_repositories):
-        """Add a new custom repository."""
-        new_rep = set(list_repositories)
-        old_rep = set(self.repositories_obj)
-
-        # add new repository
-        async def _add_repository(url):
-            """Helper function to async add repository."""
-            repository = Repository(self.coresys, url)
-            if not await repository.load():
-                _LOGGER.error("Can't load from repository %s", url)
-                return
-            self.repositories_obj[url] = repository
-
-            # don't add built-in repository to config
-            if url not in BUILTIN_REPOSITORIES:
-                self.sys_config.add_addon_repository(url)
-
-        tasks = [_add_repository(url) for url in new_rep - old_rep]
-        if tasks:
-            await asyncio.wait(tasks)
-
-        # del new repository
-        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
-            self.repositories_obj.pop(url).remove()
-            self.sys_config.drop_addon_repository(url)
-
-        # update data
-        self.data.reload()
-        await self.load_addons()
-
-    async def load_addons(self):
-        """Update/add internal add-on store."""
-        all_addons = set(self.data.system) | set(self.data.cache)
-
-        # calc diff
-        add_addons = all_addons - set(self.addons_obj)
-        del_addons = set(self.addons_obj) - all_addons
-
-        _LOGGER.info("Load add-ons: %d all - %d new - %d remove",
-                     len(all_addons), len(add_addons), len(del_addons))
-
-        # new addons
         tasks = []
-        for addon_slug in add_addons:
-            addon = Addon(self.coresys, addon_slug)
-
+        for slug in self.data.system:
+            addon = self.local[slug] = Addon(self.coresys, slug)
             tasks.append(addon.load())
-            self.addons_obj[addon_slug] = addon

+        # Run initial tasks
+        _LOGGER.info("Found %d installed add-ons", len(tasks))
         if tasks:
             await asyncio.wait(tasks)

-        # remove
-        for addon_slug in del_addons:
-            self.addons_obj.pop(addon_slug)
-
-    async def boot(self, stage):
+    async def boot(self, stage: str) -> None:
         """Boot add-ons with mode auto."""
         tasks = []
-        for addon in self.addons_obj.values():
-            if addon.is_installed and addon.boot == BOOT_AUTO and \
-                    addon.startup == stage:
-                tasks.append(addon.start())
+        for addon in self.installed:
+            if addon.boot != BOOT_AUTO or addon.startup != stage:
+                continue
+            tasks.append(addon.start())

-        _LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
+        _LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
         if tasks:
             await asyncio.wait(tasks)
             await asyncio.sleep(self.sys_config.wait_boot)

-    async def shutdown(self, stage):
+    async def shutdown(self, stage: str) -> None:
         """Shutdown addons."""
         tasks = []
-        for addon in self.addons_obj.values():
-            if addon.is_installed and \
-                    await addon.state() == STATE_STARTED and \
-                    addon.startup == stage:
-                tasks.append(addon.stop())
+        for addon in self.installed:
+            if await addon.state() != STATE_STARTED or addon.startup != stage:
+                continue
+            tasks.append(addon.stop())

-        _LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
+        _LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
         if tasks:
             await asyncio.wait(tasks)

+    async def install(self, slug: str) -> None:
+        """Install an add-on."""
+        if slug in self.local:
+            _LOGGER.warning("Add-on %s is already installed", slug)
+            return
+        store = self.store.get(slug)
+
+        if not store:
+            _LOGGER.error("Add-on %s not exists", slug)
+            raise AddonsError()
+
+        if not store.available:
+            _LOGGER.error(
+                "Add-on %s not supported on that platform", slug)
+            raise AddonsNotSupportedError()
+
+        self.data.install(store)
+        addon = Addon(self.coresys, slug)
+
+        if not addon.path_data.is_dir():
+            _LOGGER.info(
+                "Create Home Assistant add-on data folder %s", addon.path_data)
+            addon.path_data.mkdir()
+
+        # Setup/Fix AppArmor profile
+        await addon.install_apparmor()
+
+        try:
+            await addon.instance.install(store.version, store.image)
+        except DockerAPIError:
+            self.data.uninstall(addon)
+            raise AddonsError() from None
+        else:
+            self.local[slug] = addon
+
+    async def uninstall(self, slug: str) -> None:
+        """Remove an add-on."""
+        if slug not in self.local:
+            _LOGGER.warning("Add-on %s is not installed", slug)
+            return
+        addon = self.local.get(slug)
+
+        try:
+            await addon.instance.remove()
+        except DockerAPIError:
+            raise AddonsError() from None
+
+        await addon.remove_data()
+
+        # Cleanup audio settings
+        if addon.path_asound.exists():
+            with suppress(OSError):
+                addon.path_asound.unlink()
+
+        # Cleanup AppArmor profile
+        with suppress(HostAppArmorError):
+            await addon.uninstall_apparmor()
+
+        # Cleanup internal data
+        addon.remove_discovery()
+        self.data.uninstall(addon)
+        self.local.pop(slug)
+
+    async def update(self, slug: str) -> None:
+        """Update add-on."""
+        if slug not in self.local:
+            _LOGGER.error("Add-on %s is not installed", slug)
+            raise AddonsError()
+        addon = self.local.get(slug)
+
+        if addon.is_detached:
+            _LOGGER.error("Add-on %s is not available inside store", slug)
+            raise AddonsError()
+        store = self.store.get(slug)
+
+        if addon.version == store.version:
+            _LOGGER.warning("No update available for add-on %s", slug)
+            return
+
+        # Check if available, Maybe something have changed
+        if not store.available:
+            _LOGGER.error(
+                "Add-on %s not supported on that platform", slug)
+            raise AddonsNotSupportedError()
+
+        # Update instance
+        last_state = await addon.state()
+        try:
+            await addon.instance.update(store.version, store.image)
+        except DockerAPIError:
+            raise AddonsError() from None
+        self.data.update(store)
+
+        # Setup/Fix AppArmor profile
+        await addon.install_apparmor()
+
+        # restore state
+        if last_state == STATE_STARTED:
+            await addon.start()
+
+    async def rebuild(self, slug: str) -> None:
+        """Perform a rebuild of local build add-on."""
+        if slug not in self.local:
+            _LOGGER.error("Add-on %s is not installed", slug)
+            raise AddonsError()
+        addon = self.local.get(slug)
+
+        if addon.is_detached:
+            _LOGGER.error("Add-on %s is not available inside store", slug)
+            raise AddonsError()
+        store = self.store.get(slug)
+
+        # Check if a rebuild is possible now
+        if addon.version != store.version:
+            _LOGGER.error("Version changed, use Update instead Rebuild")
+            raise AddonsError()
+        if not addon.need_build:
+            _LOGGER.error("Can't rebuild a image based add-on")
+            raise AddonsNotSupportedError()
+
+        # remove docker container but not addon config
+        last_state = await addon.state()
+        try:
+            await addon.instance.remove()
+            await addon.instance.install(addon.version)
+        except DockerAPIError:
+            raise AddonsError() from None
+        else:
+            self.data.update(store)
+
+        # restore state
+        if last_state == STATE_STARTED:
+            await addon.start()
+
+    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
+        """Restore state of an add-on."""
+        if slug not in self.local:
+            _LOGGER.debug("Add-on %s is not local available for restore")
+            addon = Addon(self.coresys, slug)
+        else:
+            _LOGGER.debug("Add-on %s is local available for restore")
+            addon = self.local[slug]
+
+        await addon.restore(tar_file)
+
+        # Check if new
+        if slug in self.local:
+            return
+
+        _LOGGER.info("Detect new Add-on after restore %s", slug)
+        self.local[slug] = addon
```
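The new `local`/`store` split gives `get()` a fixed lookup priority: an installed add-on always shadows its store entry. A toy sketch of that behavior, with slugs and values invented for illustration:

```python
# Toy illustration of the lookup priority in AddonManager.get():
# an installed (local) add-on shadows its store entry.
local = {"core_ssh": "Addon(core_ssh, installed)"}
store = {
    "core_ssh": "AddonStore(core_ssh)",
    "core_mosquitto": "AddonStore(core_mosquitto)",
}


def get(slug):
    """Return the local add-on first, then fall back to the store."""
    if slug in local:
        return local[slug]
    return store.get(slug)


assert get("core_ssh") == "Addon(core_ssh, installed)"        # local wins
assert get("core_mosquitto") == "AddonStore(core_mosquitto)"  # store fallback
assert get("unknown") is None                                 # neither
```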
(One file diff suppressed because it is too large.)
```diff
@@ -9,27 +9,23 @@ from ..utils.json import JsonConfig
 from .validate import SCHEMA_BUILD_CONFIG

 if TYPE_CHECKING:
-    from .addon import Addon
+    from . import AnyAddon


 class AddonBuild(JsonConfig, CoreSysAttributes):
     """Handle build options for add-ons."""

-    def __init__(self, coresys: CoreSys, slug: str) -> None:
+    def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
         """Initialize Hass.io add-on builder."""
         self.coresys: CoreSys = coresys
-        self._id: str = slug
+        self.addon = addon

         super().__init__(
             Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)

     def save_data(self):
         """Ignore save function."""
-
-    @property
-    def addon(self) -> Addon:
-        """Return add-on of build data."""
-        return self.sys_addons.get(self._id)
+        raise RuntimeError()

     @property
     def base_image(self) -> str:
```
```diff
@@ -1,38 +1,34 @@
 """Init file for Hass.io add-on data."""
+from copy import deepcopy
 import logging
-from pathlib import Path
-
-import voluptuous as vol
-from voluptuous.humanize import humanize_error
+from typing import Any, Dict

 from ..const import (
-    ATTR_LOCATON,
-    ATTR_REPOSITORY,
-    ATTR_SLUG,
+    ATTR_IMAGE,
+    ATTR_OPTIONS,
     ATTR_SYSTEM,
     ATTR_USER,
+    ATTR_VERSION,
     FILE_HASSIO_ADDONS,
-    REPOSITORY_CORE,
-    REPOSITORY_LOCAL,
 )
-from ..coresys import CoreSysAttributes
-from ..exceptions import JsonFileError
-from ..utils.json import JsonConfig, read_json_file
-from .utils import extract_hash_from_path
-from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG
+from ..coresys import CoreSys, CoreSysAttributes
+from ..utils.json import JsonConfig
+from ..store.addon import AddonStore
+from .addon import Addon
+from .validate import SCHEMA_ADDONS_FILE

 _LOGGER = logging.getLogger(__name__)

+Config = Dict[str, Any]
+

 class AddonsData(JsonConfig, CoreSysAttributes):
-    """Hold data for Add-ons inside Hass.io."""
+    """Hold data for installed Add-ons inside Hass.io."""

-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys):
         """Initialize data holder."""
         super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
-        self.coresys = coresys
-        self._repositories = {}
-        self._cache = {}
+        self.coresys: CoreSys = coresys

     @property
     def user(self):
@@ -44,93 +40,35 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         """Return local add-on data."""
         return self._data[ATTR_SYSTEM]

-    @property
-    def cache(self):
-        """Return add-on data from cache/repositories."""
-        return self._cache
+    def install(self, addon: AddonStore) -> None:
+        """Set addon as installed."""
+        self.system[addon.slug] = deepcopy(addon.data)
+        self.user[addon.slug] = {
+            ATTR_OPTIONS: {},
+            ATTR_VERSION: addon.version,
+            ATTR_IMAGE: addon.image,
+        }
+        self.save_data()

-    @property
-    def repositories(self):
-        """Return add-on data from repositories."""
-        return self._repositories
+    def uninstall(self, addon: Addon) -> None:
+        """Set add-on as uninstalled."""
+        self.system.pop(addon.slug, None)
+        self.user.pop(addon.slug, None)
+        self.save_data()

-    def reload(self):
-        """Read data from add-on repository."""
-        self._cache = {}
-        self._repositories = {}
+    def update(self, addon: AddonStore) -> None:
+        """Update version of add-on."""
+        self.system[addon.slug] = deepcopy(addon.data)
+        self.user[addon.slug].update({
+            ATTR_VERSION: addon.version,
+            ATTR_IMAGE: addon.image,
+        })
+        self.save_data()

-        # read core repository
-        self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)
+    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
+        """Restore data to add-on."""
+        self.user[slug] = deepcopy(user)
+        self.system[slug] = deepcopy(system)

-        # read local repository
-        self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)
-
-        # add built-in repositories information
-        self._set_builtin_repositories()
-
-        # read custom git repositories
-        for repository_element in self.sys_config.path_addons_git.iterdir():
-            if repository_element.is_dir():
-                self._read_git_repository(repository_element)
-
-    def _read_git_repository(self, path):
-        """Process a custom repository folder."""
-        slug = extract_hash_from_path(path)
-
-        # exists repository json
-        repository_file = Path(path, "repository.json")
-        try:
-            repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
-        except JsonFileError:
-            _LOGGER.warning(
-                "Can't read repository information from %s", repository_file
-            )
-            return
-        except vol.Invalid:
-            _LOGGER.warning("Repository parse error %s", repository_file)
-            return
-
-        # process data
-        self._repositories[slug] = repository_info
-        self._read_addons_folder(path, slug)
-
-    def _read_addons_folder(self, path, repository):
-        """Read data from add-ons folder."""
-        for addon in path.glob("**/config.json"):
-            try:
-                addon_config = read_json_file(addon)
-            except JsonFileError:
-                _LOGGER.warning("Can't read %s from repository %s", addon, repository)
-                continue
-
-            # validate
-            try:
-                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
-            except vol.Invalid as ex:
-                _LOGGER.warning(
-                    "Can't read %s: %s", addon, humanize_error(addon_config, ex)
-                )
-                continue
-
-            # Generate slug
-            addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])
-
-            # store
-            addon_config[ATTR_REPOSITORY] = repository
-            addon_config[ATTR_LOCATON] = str(addon.parent)
-            self._cache[addon_slug] = addon_config
-
-    def _set_builtin_repositories(self):
-        """Add local built-in repository into dataset."""
-        try:
-            builtin_file = Path(__file__).parent.joinpath("built-in.json")
-            builtin_data = read_json_file(builtin_file)
-        except JsonFileError:
-            _LOGGER.warning("Can't read built-in json")
-            return
-
-        # core repository
-        self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]
-
-        # local repository
-        self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]
+        self.user[slug][ATTR_IMAGE] = image
+        self.save_data()
```
hassio/addons/model.py: new file (498 lines; indentation reconstructed, since the scrape flattened the Python source)

```python
"""Init file for Hass.io add-ons."""
from distutils.version import StrictVersion
from pathlib import Path
from typing import Any, Awaitable, Dict, List, Optional

import voluptuous as vol

from ..const import (
    ATTR_APPARMOR,
    ATTR_ARCH,
    ATTR_AUDIO,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_BOOT,
    ATTR_DESCRIPTON,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
    ATTR_DISCOVERY,
    ATTR_DOCKER_API,
    ATTR_ENVIRONMENT,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
    ATTR_HASSIO_ROLE,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_API,
    ATTR_HOST_DBUS,
    ATTR_HOST_IPC,
    ATTR_HOST_NETWORK,
    ATTR_HOST_PID,
    ATTR_IMAGE,
    ATTR_INGRESS,
    ATTR_KERNEL_MODULES,
    ATTR_LEGACY,
    ATTR_LOCATON,
    ATTR_MACHINE,
    ATTR_MAP,
    ATTR_NAME,
    ATTR_OPTIONS,
    ATTR_PANEL_ADMIN,
    ATTR_PANEL_ICON,
    ATTR_PANEL_TITLE,
    ATTR_PORTS,
    ATTR_PORTS_DESCRIPTION,
    ATTR_PRIVILEGED,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_STARTUP,
    ATTR_STDIN,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_URL,
    ATTR_VERSION,
    ATTR_WEBUI,
    SECURITY_DEFAULT,
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)
from ..coresys import CoreSysAttributes
from .validate import MACHINE_ALL, RE_SERVICE, RE_VOLUME, validate_options

Data = Dict[str, Any]


class AddonModel(CoreSysAttributes):
    """Add-on Data layout."""

    slug: str = None

    @property
    def data(self) -> Data:
        """Return Add-on config/data."""
        raise NotImplementedError()

    @property
    def is_installed(self) -> bool:
        """Return True if an add-on is installed."""
        raise NotImplementedError()

    @property
    def is_detached(self) -> bool:
        """Return True if add-on is detached."""
        raise NotImplementedError()

    @property
    def available(self) -> bool:
        """Return True if this add-on is available on this platform."""
        return self._available(self.data)

    @property
    def options(self) -> Dict[str, Any]:
        """Return options with local changes."""
        return self.data[ATTR_OPTIONS]

    @property
    def boot(self) -> bool:
        """Return boot config with prio local settings."""
        return self.data[ATTR_BOOT]

    @property
    def auto_update(self) -> Optional[bool]:
        """Return if auto update is enable."""
        return None

    @property
    def name(self) -> str:
        """Return name of add-on."""
        return self.data[ATTR_NAME]

    @property
    def timeout(self) -> int:
        """Return timeout of addon for docker stop."""
        return self.data[ATTR_TIMEOUT]

    @property
    def uuid(self) -> Optional[str]:
        """Return an API token for this add-on."""
        return None

    @property
    def hassio_token(self) -> Optional[str]:
        """Return access token for Hass.io API."""
        return None

    @property
    def ingress_token(self) -> Optional[str]:
        """Return access token for Hass.io API."""
        return None

    @property
    def ingress_entry(self) -> Optional[str]:
        """Return ingress external URL."""
        return None

    @property
    def description(self) -> str:
        """Return description of add-on."""
        return self.data[ATTR_DESCRIPTON]

    @property
    def long_description(self) -> Optional[str]:
        """Return README.md as long_description."""
        readme = Path(self.path_location, 'README.md')

        # If readme not exists
        if not readme.exists():
            return None

        # Return data
        with readme.open('r') as readme_file:
            return readme_file.read()

    @property
    def repository(self) -> str:
        """Return repository of add-on."""
        return self.data[ATTR_REPOSITORY]

    @property
    def latest_version(self) -> str:
        """Return latest version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def version(self) -> str:
        """Return version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def protected(self) -> bool:
        """Return if add-on is in protected mode."""
        return True

    @property
    def startup(self) -> Optional[str]:
        """Return startup type of add-on."""
        return self.data.get(ATTR_STARTUP)

    @property
    def services_role(self) -> Dict[str, str]:
        """Return dict of services with rights."""
        services_list = self.data.get(ATTR_SERVICES, [])

        services = {}
        for data in services_list:
            service = RE_SERVICE.match(data)
            services[service.group('service')] = service.group('rights')

        return services

    @property
    def discovery(self) -> List[str]:
        """Return list of discoverable components/platforms."""
        return self.data.get(ATTR_DISCOVERY, [])

    @property
    def ports_description(self) -> Optional[Dict[str, str]]:
        """Return descriptions of ports."""
        return self.data.get(ATTR_PORTS_DESCRIPTION)

    @property
    def ports(self) -> Optional[Dict[str, Optional[int]]]:
        """Return ports of add-on."""
        return self.data.get(ATTR_PORTS)

    @property
    def ingress_url(self) -> Optional[str]:
        """Return URL to ingress url."""
        return None

    @property
    def webui(self) -> Optional[str]:
        """Return URL to webui or None."""
        return self.data.get(ATTR_WEBUI)

    @property
    def ingress_port(self) -> Optional[int]:
        """Return Ingress port."""
        return None

    @property
    def panel_icon(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data[ATTR_PANEL_ICON]

    @property
    def panel_title(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data.get(ATTR_PANEL_TITLE, self.name)

    @property
    def panel_admin(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data[ATTR_PANEL_ADMIN]

    @property
    def host_network(self) -> bool:
        """Return True if add-on run on host network."""
        return self.data[ATTR_HOST_NETWORK]

    @property
    def host_pid(self) -> bool:
        """Return True if add-on run on host PID namespace."""
        return self.data[ATTR_HOST_PID]

    @property
    def host_ipc(self) -> bool:
        """Return True if add-on run on host IPC namespace."""
        return self.data[ATTR_HOST_IPC]

    @property
    def host_dbus(self) -> bool:
        """Return True if add-on run on host D-BUS."""
        return self.data[ATTR_HOST_DBUS]

    @property
    def devices(self) -> Optional[List[str]]:
        """Return devices of add-on."""
        return self.data.get(ATTR_DEVICES, [])

    @property
    def auto_uart(self) -> bool:
        """Return True if we should map all UART device."""
        return self.data[ATTR_AUTO_UART]

    @property
    def tmpfs(self) -> Optional[str]:
        """Return tmpfs of add-on."""
        return self.data.get(ATTR_TMPFS)

    @property
    def environment(self) -> Optional[Dict[str, str]]:
        """Return environment of add-on."""
        return self.data.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self) -> List[str]:
        """Return list of privilege."""
        return self.data.get(ATTR_PRIVILEGED, [])

    @property
    def apparmor(self) -> str:
        """Return True if AppArmor is enabled."""
        if not self.data.get(ATTR_APPARMOR):
            return SECURITY_DISABLE
        elif self.sys_host.apparmor.exists(self.slug):
            return SECURITY_PROFILE
        return SECURITY_DEFAULT

    @property
    def legacy(self) -> bool:
        """Return if the add-on don't support Home Assistant labels."""
        return self.data[ATTR_LEGACY]

    @property
    def access_docker_api(self) -> bool:
        """Return if the add-on need read-only Docker API access."""
        return self.data[ATTR_DOCKER_API]

    @property
    def access_hassio_api(self) -> bool:
        """Return True if the add-on access to Hass.io REASTful API."""
        return self.data[ATTR_HASSIO_API]

    @property
    def access_homeassistant_api(self) -> bool:
        """Return True if the add-on access to Home Assistant API proxy."""
        return self.data[ATTR_HOMEASSISTANT_API]

    @property
    def hassio_role(self) -> str:
        """Return Hass.io role for API."""
        return self.data[ATTR_HASSIO_ROLE]

    @property
    def with_stdin(self) -> bool:
        """Return True if the add-on access use stdin input."""
        return self.data[ATTR_STDIN]

    @property
    def with_ingress(self) -> bool:
        """Return True if the add-on access support ingress."""
        return self.data[ATTR_INGRESS]

    @property
    def ingress_panel(self) -> Optional[bool]:
        """Return True if the add-on access support ingress."""
        return None

    @property
    def with_gpio(self) -> bool:
        """Return True if the add-on access to GPIO interface."""
        return self.data[ATTR_GPIO]

    @property
    def with_kernel_modules(self) -> bool:
        """Return True if the add-on access to kernel modules."""
        return self.data[ATTR_KERNEL_MODULES]

    @property
    def with_full_access(self) -> bool:
        """Return True if the add-on want full access to hardware."""
        return self.data[ATTR_FULL_ACCESS]

    @property
    def with_devicetree(self) -> bool:
        """Return True if the add-on read access to devicetree."""
        return self.data[ATTR_DEVICETREE]

    @property
    def access_auth_api(self) -> bool:
        """Return True if the add-on access to login/auth backend."""
        return self.data[ATTR_AUTH_API]

    @property
    def with_audio(self) -> bool:
        """Return True if the add-on access to audio."""
        return self.data[ATTR_AUDIO]

    @property
    def homeassistant_version(self) -> Optional[str]:
        """Return min Home Assistant version they needed by Add-on."""
        return self.data.get(ATTR_HOMEASSISTANT)

    @property
    def url(self) -> Optional[str]:
        """Return URL of add-on."""
        return self.data.get(ATTR_URL)

    @property
    def with_icon(self) -> bool:
        """Return True if an icon exists."""
        return self.path_icon.exists()

    @property
    def with_logo(self) -> bool:
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def with_changelog(self) -> bool:
        """Return True if a changelog exists."""
        return self.path_changelog.exists()

    @property
    def supported_arch(self) -> List[str]:
        """Return list of supported arch."""
        return self.data[ATTR_ARCH]

    @property
    def supported_machine(self) -> List[str]:
        """Return list of supported machine."""
        return self.data.get(ATTR_MACHINE, MACHINE_ALL)

    @property
    def image(self) -> str:
        """Generate image name from data."""
        return self._image(self.data)

    @property
    def need_build(self) -> bool:
        """Return True if this add-on need a local build."""
        return ATTR_IMAGE not in self.data

    @property
    def map_volumes(self) -> Dict[str, str]:
        """Return a dict of {volume: policy} from add-on."""
        volumes = {}
        for volume in self.data[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or 'ro'

        return volumes

    @property
    def path_location(self) -> Path:
        """Return path to this add-on."""
        return Path(self.data[ATTR_LOCATON])

    @property
    def path_icon(self) -> Path:
        """Return path to add-on icon."""
        return Path(self.path_location, 'icon.png')

    @property
    def path_logo(self) -> Path:
        """Return path to add-on logo."""
        return Path(self.path_location, 'logo.png')

    @property
    def path_changelog(self) -> Path:
        """Return path to add-on changelog."""
        return Path(self.path_location, 'CHANGELOG.md')

    @property
    def path_apparmor(self) -> Path:
        """Return path to custom AppArmor profile."""
        return Path(self.path_location, 'apparmor.txt')

    @property
    def schema(self) -> vol.Schema:
        """Create a schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    def __eq__(self, other):
        """Compaired add-on objects."""
        if self.slug == other.slug:
            return True
        return False

    def _available(self, config) -> bool:
        """Return True if this add-on is available on this platform."""
        # Architecture
        if not self.sys_arch.is_supported(config[ATTR_ARCH]):
            return False

        # Machine / Hardware
        machine = config.get(ATTR_MACHINE) or MACHINE_ALL
        if self.sys_machine not in machine:
            return False

        # Home Assistant
        version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
        if StrictVersion(self.sys_homeassistant.version) < StrictVersion(version):
            return False

        return True

    def _image(self, config) -> str:
        """Generate image name from data."""
        # Repository with Dockerhub images
        if ATTR_IMAGE in config:
            arch = self.sys_arch.match(config[ATTR_ARCH])
            return config[ATTR_IMAGE].format(arch=arch)

        # local build
        return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"

    def install(self) -> Awaitable[None]:
        """Install this add-on."""
        return self.sys_addons.install(self.slug)

    def uninstall(self) -> Awaitable[None]:
        """Uninstall this add-on."""
        return self.sys_addons.uninstall(self.slug)

    def update(self) -> Awaitable[None]:
        """Update this add-on."""
        return self.sys_addons.update(self.slug)

    def rebuild(self) -> Awaitable[None]:
        """Rebuild this add-on."""
        return self.sys_addons.rebuild(self.slug)
```
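`AddonModel._image()` encodes the image-naming convention: a repository-provided template with an `{arch}` placeholder wins, otherwise a local-build name is derived from the repository and slug. A small standalone sketch of that logic follows; the architecture values are invented for the example.

```python
# Rough illustration of AddonModel._image(): a repository-supplied
# image template wins; otherwise a local-build name is derived as
# <repository>/<arch>-addon-<slug>. Arch values are example inputs.
def image_name(config, sys_arch_match="armv7", sys_arch_default="armv7"):
    if "image" in config:
        # Docker Hub image: substitute the best matching architecture
        return config["image"].format(arch=sys_arch_match)
    # Local build fallback
    return f"{config['repository']}/{sys_arch_default}-addon-{config['slug']}"


assert image_name({"image": "homeassistant/{arch}-addon-ssh"}) == "homeassistant/armv7-addon-ssh"
assert image_name({"repository": "local", "slug": "myaddon"}) == "local/armv7-addon-myaddon"
```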
```diff
@@ -2,10 +2,8 @@
 from __future__ import annotations

 import asyncio
-import hashlib
 import logging
 from pathlib import Path
-import re
 from typing import TYPE_CHECKING

 from ..const import (
@@ -22,13 +20,12 @@ from ..const import (
 )

 if TYPE_CHECKING:
-    from .addon import Addon
+    from .model import AddonModel

-RE_SHA1 = re.compile(r"[a-f0-9]{8}")
 _LOGGER = logging.getLogger(__name__)


-def rating_security(addon: Addon) -> int:
+def rating_security(addon: AddonModel) -> int:
     """Return 1-6 for security rating.

     1 = not secure
@@ -85,34 +82,6 @@ def rating_security(addon: AddonModel) -> int:
     return max(min(6, rating), 1)


-def get_hash_from_repository(name: str) -> str:
-    """Generate a hash from repository."""
-    key = name.lower().encode()
-    return hashlib.sha1(key).hexdigest()[:8]
-
-
-def extract_hash_from_path(path: Path) -> str:
-    """Extract repo id from path."""
-    repository_dir = path.parts[-1]
-
-    if not RE_SHA1.match(repository_dir):
-        return get_hash_from_repository(repository_dir)
-    return repository_dir
-
-
-def check_installed(method):
-    """Wrap function with check if add-on is installed."""
-
-    async def wrap_check(addon, *args, **kwargs):
-        """Return False if not installed or the function."""
-        if not addon.is_installed:
-            _LOGGER.error("Addon %s is not installed", addon.slug)
-            return False
-        return await method(addon, *args, **kwargs)
-
-    return wrap_check
-
-
 async def remove_data(folder: Path) -> None:
     """Remove folder and reset privileged."""
     try:
```
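The removed `get_hash_from_repository()` (presumably relocated into the new store package rather than dropped outright) is worth a note: the 8-character repository ids used as add-on slug prefixes are just a truncated SHA-1 of the lowercased repository URL. A self-contained sketch:

```python
# The repository hash deleted above is a plain truncated SHA-1 over
# the lowercased repository name.
import hashlib


def get_hash_from_repository(name: str) -> str:
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


print(get_hash_from_repository("https://github.com/hassio-addons/repository"))
# prints an 8-character hex id, e.g. 'a0d7b954' for this input (illustrative)
```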
@@ -1,29 +1,98 @@
|
||||
"""Validate add-ons options schema."""
|
||||
import logging
|
||||
import re
|
||||
import secrets
|
||||
import uuid
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
|
||||
ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
|
||||
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
|
||||
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
|
||||
ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
|
||||
ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
|
||||
ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
|
||||
ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
|
||||
ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
|
||||
PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
|
||||
STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
|
||||
ARCH_ALL,
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_ARGS,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT,
|
||||
ATTR_BUILD_FROM,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_ENVIRONMENT,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_INGRESS_PANEL,
|
||||
ATTR_PANEL_ADMIN,
|
||||
ATTR_PANEL_ICON,
|
||||
ATTR_PANEL_TITLE,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAP,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PORTS,
|
||||
ATTR_PORTS_DESCRIPTION,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SQUASH,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_URL,
|
||||
ATTR_USER,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
PRIVILEGED_ALL,
|
||||
ROLE_ALL,
|
||||
ROLE_DEFAULT,
|
||||
STARTUP_ALL,
|
||||
STARTUP_APPLICATION,
|
||||
STARTUP_SERVICES,
|
||||
STATE_STARTED,
|
||||
STATE_STOPPED,
|
||||
)
|
||||
from ..discovery.validate import valid_discovery_service
|
||||
from ..validate import (
|
||||
ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)
|
||||
ALSA_DEVICE,
|
||||
DOCKER_PORTS,
|
||||
DOCKER_PORTS_DESCRIPTION,
|
||||
NETWORK_PORT,
|
||||
TOKEN,
|
||||
UUID_MATCH,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -87,8 +156,16 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): DOCKER_PORTS,
    vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
    vol.Optional(ATTR_WEBUI):
        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
    vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
    vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)),
    vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
    vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
    vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
    vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
    vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
@@ -133,14 +210,6 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
}, extra=vol.REMOVE_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_URL): vol.Url(),
    vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
}, extra=vol.REMOVE_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema({
    vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
@@ -158,7 +227,8 @@ SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Optional(ATTR_IMAGE): vol.Coerce(str),
    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
    vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
    vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
    vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(str),
    vol.Optional(ATTR_OPTIONS, default=dict): dict,
    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
    vol.Optional(ATTR_BOOT):
@@ -167,6 +237,7 @@ SCHEMA_ADDON_USER = vol.Schema({
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
    vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
    vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)

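The ATTR_WEBUI template matcher and the new ingress keys above are easiest to sanity-check with voluptuous itself. A minimal sketch, separate from the diff; the sample add-on values are hypothetical:

import voluptuous as vol

# Same shape as the ATTR_WEBUI matcher above: a scheme (or a [PROTO:<option>]
# placeholder), a literal [HOST], and a [PORT:<default>] placeholder that the
# Supervisor fills in at runtime.
WEBUI = vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$")

sample_config = {
    "webui": "[PROTO:ssl]://[HOST]:[PORT:8080]/index.html",  # hypothetical add-on
    "ingress": True,
    "ingress_port": 8099,
    "panel_icon": "mdi:puzzle",
}

WEBUI(sample_config["webui"])  # returns the value; raises vol.Invalid on a malformed template
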
@@ -14,6 +14,7 @@ from .hassos import APIHassOS
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .proxy import APIProxy
from .security import SecurityMiddleware
from .services import APIServices
@@ -47,6 +48,7 @@ class RestAPI(CoreSysAttributes):
        self._register_proxy()
        self._register_panel()
        self._register_addons()
        self._register_ingress()
        self._register_snapshots()
        self._register_discovery()
        self._register_services()
@@ -186,6 +188,17 @@ class RestAPI(CoreSysAttributes):
            web.get('/addons/{addon}/stats', api_addons.stats),
        ])

    def _register_ingress(self) -> None:
        """Register Ingress functions."""
        api_ingress = APIIngress()
        api_ingress.coresys = self.coresys

        self.webapp.add_routes([
            web.post('/ingress/session', api_ingress.create_session),
            web.get('/ingress/panels', api_ingress.panels),
            web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
        ])

    def _register_snapshots(self) -> None:
        """Register snapshots functions."""
        api_snapshots = APISnapshots()
@@ -233,29 +246,6 @@ class RestAPI(CoreSysAttributes):
    def _register_panel(self) -> None:
        """Register panel for Home Assistant."""
        panel_dir = Path(__file__).parent.joinpath("panel")

        def create_response(panel_file):
            """Create a function to generate a response."""
            path = panel_dir.joinpath(f"{panel_file!s}.html")
            return lambda request: web.FileResponse(path)

        # This route is for backwards compatibility with HA < 0.58
        self.webapp.add_routes(
            [web.get('/panel', create_response('hassio-main-es5'))])

        # This route is for backwards compatibility with HA 0.58 - 0.61
        self.webapp.add_routes([
            web.get('/panel_es5', create_response('hassio-main-es5')),
            web.get('/panel_latest', create_response('hassio-main-latest')),
        ])

        # This route is for backwards compatibility with HA 0.62 - 0.70
        self.webapp.add_routes([
            web.get('/app-es5/index.html', create_response('index')),
            web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
        ])

        # This route is for HA > 0.70
        self.webapp.add_routes([web.static('/app', panel_dir)])

    async def start(self) -> None:

@@ -1,31 +1,93 @@
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict, List

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .utils import api_process, api_process_raw, api_validate
from ..addons import AnyAddon
from ..addons.utils import rating_security
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
    ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
    ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
    ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
    ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
    ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
    ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
    ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
    ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
    ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
    ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
    ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
    ATTR_ADDONS,
    ATTR_APPARMOR,
    ATTR_ARCH,
    ATTR_AUDIO,
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTH_API,
    ATTR_AUTO_UPDATE,
    ATTR_AVAILABLE,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
    ATTR_BUILD,
    ATTR_CHANGELOG,
    ATTR_CPU_PERCENT,
    ATTR_DESCRIPTON,
    ATTR_DETACHED,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
    ATTR_DISCOVERY,
    ATTR_DOCKER_API,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
    ATTR_HASSIO_ROLE,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_API,
    ATTR_HOST_DBUS,
    ATTR_HOST_IPC,
    ATTR_HOST_NETWORK,
    ATTR_HOST_PID,
    ATTR_ICON,
    ATTR_INGRESS,
    ATTR_INGRESS_ENTRY,
    ATTR_INGRESS_PANEL,
    ATTR_INGRESS_PORT,
    ATTR_INGRESS_URL,
    ATTR_INSTALLED,
    ATTR_IP_ADDRESS,
    ATTR_KERNEL_MODULES,
    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
    ATTR_LAST_VERSION,
    ATTR_LOGO,
    ATTR_LONG_DESCRIPTION,
    ATTR_MACHINE,
    ATTR_MAINTAINER,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_USAGE,
    ATTR_NAME,
    ATTR_NETWORK,
    ATTR_NETWORK_DESCRIPTION,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_OPTIONS,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_RATING,
    ATTR_REPOSITORIES,
    ATTR_REPOSITORY,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_SOURCE,
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_URL,
    ATTR_VERSION,
    ATTR_WEBUI,
    BOOT_AUTO,
    BOOT_MANUAL,
    CONTENT_TYPE_BINARY,
    CONTENT_TYPE_PNG,
    CONTENT_TYPE_TEXT,
    REQUEST_FROM,
    STATE_NONE,
)
from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS, ALSA_DEVICE
from ..exceptions import APIError
from ..validate import ALSA_DEVICE, DOCKER_PORTS
from .utils import api_process, api_process_raw, api_validate

_LOGGER = logging.getLogger(__name__)

@@ -40,6 +102,7 @@ SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
    vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
})

# pylint: disable=no-value-for-parameter
@@ -51,7 +114,7 @@ SCHEMA_SECURITY = vol.Schema({
class APIAddons(CoreSysAttributes):
    """Handle RESTful API for add-on functions."""

    def _extract_addon(self, request, check_installed=True):
    def _extract_addon(self, request: web.Request, check_installed: bool = True) -> AnyAddon:
        """Return addon, throw an exception it it doesn't exist."""
        addon_slug = request.match_info.get('addon')

@@ -69,16 +132,16 @@ class APIAddons(CoreSysAttributes):
        return addon

    @api_process
    async def list(self, request):
    async def list(self, request: web.Request) -> Dict[str, Any]:
        """Return all add-ons or repositories."""
        data_addons = []
        for addon in self.sys_addons.list_addons:
        for addon in self.sys_addons.all:
            data_addons.append({
                ATTR_NAME: addon.name,
                ATTR_SLUG: addon.slug,
                ATTR_DESCRIPTON: addon.description,
                ATTR_VERSION: addon.last_version,
                ATTR_INSTALLED: addon.version_installed,
                ATTR_VERSION: addon.latest_version,
                ATTR_INSTALLED: addon.version if addon.is_installed else None,
                ATTR_AVAILABLE: addon.available,
                ATTR_DETACHED: addon.is_detached,
                ATTR_REPOSITORY: addon.repository,
@@ -89,7 +152,7 @@ class APIAddons(CoreSysAttributes):
            })

        data_repositories = []
        for repository in self.sys_addons.list_repositories:
        for repository in self.sys_store.all:
            data_repositories.append({
                ATTR_SLUG: repository.slug,
                ATTR_NAME: repository.name,
@@ -104,37 +167,38 @@ class APIAddons(CoreSysAttributes):
        }

    @api_process
    async def reload(self, request):
        """Reload all add-on data."""
        await asyncio.shield(self.sys_addons.reload())
        return True
    async def reload(self, request: web.Request) -> None:
        """Reload all add-on data from store."""
        await asyncio.shield(self.sys_store.reload())

    @api_process
    async def info(self, request):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return add-on information."""
        addon = self._extract_addon(request, check_installed=False)

        return {
        data = {
            ATTR_NAME: addon.name,
            ATTR_SLUG: addon.slug,
            ATTR_DESCRIPTON: addon.description,
            ATTR_LONG_DESCRIPTION: addon.long_description,
            ATTR_VERSION: addon.version_installed,
            ATTR_AUTO_UPDATE: addon.auto_update,
            ATTR_AUTO_UPDATE: None,
            ATTR_REPOSITORY: addon.repository,
            ATTR_LAST_VERSION: addon.last_version,
            ATTR_STATE: await addon.state(),
            ATTR_VERSION: None,
            ATTR_LAST_VERSION: addon.latest_version,
            ATTR_PROTECTED: addon.protected,
            ATTR_RATING: rating_security(addon),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_ARCH: addon.supported_arch,
            ATTR_MACHINE: addon.supported_machine,
            ATTR_HOMEASSISTANT: addon.homeassistant_version,
            ATTR_URL: addon.url,
            ATTR_STATE: STATE_NONE,
            ATTR_DETACHED: addon.is_detached,
            ATTR_AVAILABLE: addon.available,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_NETWORK_DESCRIPTION: addon.ports_description,
            ATTR_HOST_NETWORK: addon.host_network,
            ATTR_HOST_PID: addon.host_pid,
            ATTR_HOST_IPC: addon.host_ipc,
@@ -146,8 +210,8 @@ class APIAddons(CoreSysAttributes):
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
            ATTR_WEBUI: addon.webui,
            ATTR_STDIN: addon.with_stdin,
            ATTR_WEBUI: None,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HASSIO_ROLE: addon.hassio_role,
            ATTR_AUTH_API: addon.access_auth_api,
@@ -157,21 +221,43 @@ class APIAddons(CoreSysAttributes):
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_DOCKER_API: addon.access_docker_api,
            ATTR_AUDIO: addon.with_audio,
            ATTR_AUDIO_INPUT: addon.audio_input,
            ATTR_AUDIO_OUTPUT: addon.audio_output,
            ATTR_AUDIO_INPUT: None,
            ATTR_AUDIO_OUTPUT: None,
            ATTR_SERVICES: _pretty_services(addon),
            ATTR_DISCOVERY: addon.discovery,
            ATTR_IP_ADDRESS: None,
            ATTR_INGRESS: addon.with_ingress,
            ATTR_INGRESS_ENTRY: None,
            ATTR_INGRESS_URL: None,
            ATTR_INGRESS_PORT: None,
            ATTR_INGRESS_PANEL: None,
        }

        if addon.is_installed:
            data.update({
                ATTR_STATE: await addon.state(),
                ATTR_WEBUI: addon.webui,
                ATTR_INGRESS_ENTRY: addon.ingress_entry,
                ATTR_INGRESS_URL: addon.ingress_url,
                ATTR_INGRESS_PORT: addon.ingress_port,
                ATTR_INGRESS_PANEL: addon.ingress_panel,
                ATTR_AUDIO_INPUT: addon.audio_input,
                ATTR_AUDIO_OUTPUT: addon.audio_output,
                ATTR_AUTO_UPDATE: addon.auto_update,
                ATTR_IP_ADDRESS: str(addon.ip_address),
                ATTR_VERSION: addon.version,
            })

        return data

    @api_process
    async def options(self, request):
    async def options(self, request: web.Request) -> None:
        """Store user options for add-on."""
        addon = self._extract_addon(request)

        addon_schema = SCHEMA_OPTIONS.extend({
            vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
        })

        body = await api_validate(addon_schema, request)

        if ATTR_OPTIONS in body:
@@ -186,12 +272,14 @@ class APIAddons(CoreSysAttributes):
            addon.audio_input = body[ATTR_AUDIO_INPUT]
        if ATTR_AUDIO_OUTPUT in body:
            addon.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_INGRESS_PANEL in body:
            addon.ingress_panel = body[ATTR_INGRESS_PANEL]
            await self.sys_ingress.update_hass_panel(addon)

        addon.save_data()
        return True
        addon.save_persist()

    @api_process
    async def security(self, request):
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
        addon = self._extract_addon(request)
        body = await api_validate(SCHEMA_SECURITY, request)
@@ -201,17 +289,13 @@ class APIAddons(CoreSysAttributes):
            addon.protected = body[ATTR_PROTECTED]

        addon.save_data()
        return True

    @api_process
    async def stats(self, request):
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        addon = self._extract_addon(request)
        stats = await addon.stats()

        if not stats:
            raise APIError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
@@ -223,19 +307,19 @@ class APIAddons(CoreSysAttributes):
        }

    @api_process
    def install(self, request):
    def install(self, request: web.Request) -> Awaitable[None]:
        """Install add-on."""
        addon = self._extract_addon(request, check_installed=False)
        return asyncio.shield(addon.install())

    @api_process
    def uninstall(self, request):
    def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall())

    @api_process
    def start(self, request):
    def start(self, request: web.Request) -> Awaitable[None]:
        """Start add-on."""
        addon = self._extract_addon(request)

@@ -249,29 +333,29 @@ class APIAddons(CoreSysAttributes):
        return asyncio.shield(addon.start())

    @api_process
    def stop(self, request):
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.stop())

    @api_process
    def update(self, request):
    def update(self, request: web.Request) -> Awaitable[None]:
        """Update add-on."""
        addon = self._extract_addon(request)

        if addon.last_version == addon.version_installed:
        if addon.latest_version == addon.version:
            raise APIError("No update available!")

        return asyncio.shield(addon.update())

    @api_process
    def restart(self, request):
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.restart())

    @api_process
    def rebuild(self, request):
    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild local build add-on."""
        addon = self._extract_addon(request)
        if not addon.need_build:
@@ -280,13 +364,13 @@ class APIAddons(CoreSysAttributes):
        return asyncio.shield(addon.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return logs from add-on."""
        addon = self._extract_addon(request)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def icon(self, request):
    async def icon(self, request: web.Request) -> bytes:
        """Return icon from add-on."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_icon:
@@ -296,7 +380,7 @@ class APIAddons(CoreSysAttributes):
        return png.read()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request):
    async def logo(self, request: web.Request) -> bytes:
        """Return logo from add-on."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
@@ -306,7 +390,7 @@ class APIAddons(CoreSysAttributes):
        return png.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def changelog(self, request):
    async def changelog(self, request: web.Request) -> str:
        """Return changelog from add-on."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_changelog:
@@ -316,17 +400,17 @@ class APIAddons(CoreSysAttributes):
        return changelog.read()

    @api_process
    async def stdin(self, request):
    async def stdin(self, request: web.Request) -> None:
        """Write to stdin of add-on."""
        addon = self._extract_addon(request)
        if not addon.with_stdin:
            raise APIError("STDIN not supported by add-on")

        data = await request.read()
        return await asyncio.shield(addon.write_stdin(data))
        await asyncio.shield(addon.write_stdin(data))


def _pretty_devices(addon):
def _pretty_devices(addon: AnyAddon) -> List[str]:
    """Return a simplified device list."""
    dev_list = addon.devices
    if not dev_list:
@@ -334,7 +418,7 @@ def _pretty_devices(addon):
    return [row.split(':')[0] for row in dev_list]


def _pretty_services(addon):
def _pretty_services(addon: AnyAddon) -> List[str]:
    """Return a simplified services role list."""
    services = []
    for name, access in addon.services_role.items():
@@ -1,27 +1,31 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

import voluptuous as vol
from aiohttp import web

from .utils import api_process, api_validate
from ..const import (
    ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
    ATTR_VERSION_CLI_LATEST)
    ATTR_BOARD,
    ATTR_VERSION,
    ATTR_VERSION_CLI,
    ATTR_VERSION_CLI_LATEST,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHassOS(CoreSysAttributes):
    """Handle RESTful API for HassOS functions."""

    @api_process
    async def info(self, request):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HassOS information."""
        return {
            ATTR_VERSION: self.sys_hassos.version,
@@ -32,7 +36,7 @@ class APIHassOS(CoreSysAttributes):
        }

    @api_process
    async def update(self, request):
    async def update(self, request: web.Request) -> None:
        """Update HassOS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
@@ -40,7 +44,7 @@ class APIHassOS(CoreSysAttributes):
        await asyncio.shield(self.sys_hassos.update(version))

    @api_process
    async def update_cli(self, request):
    async def update_cli(self, request: web.Request) -> None:
        """Update HassOS CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
@@ -48,6 +52,6 @@ class APIHassOS(CoreSysAttributes):
        await asyncio.shield(self.sys_hassos.update_cli(version))

    @api_process
    def config_sync(self, request):
    def config_sync(self, request: web.Request) -> Awaitable[None]:
        """Trigger config reload on HassOS."""
        return asyncio.shield(self.sys_hassos.config_sync())

@@ -27,6 +27,7 @@ from ..const import (
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    ATTR_IP_ADDRESS,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
@@ -40,8 +41,8 @@ _LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.Boolean(),
        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Any(None, DOCKER_IMAGE),
        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(DOCKER_IMAGE),
        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_PORT): NETWORK_PORT,
        vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_SSL): vol.Boolean(),
@@ -62,8 +63,9 @@ class APIHomeAssistant(CoreSysAttributes):
        """Return host information."""
        return {
            ATTR_VERSION: self.sys_homeassistant.version,
            ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
            ATTR_LAST_VERSION: self.sys_homeassistant.latest_version,
            ATTR_MACHINE: self.sys_homeassistant.machine,
            ATTR_IP_ADDRESS: str(self.sys_homeassistant.ip_address),
            ATTR_ARCH: self.sys_homeassistant.arch,
            ATTR_IMAGE: self.sys_homeassistant.image,
            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
@@ -81,7 +83,7 @@ class APIHomeAssistant(CoreSysAttributes):

        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
            self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
            self.sys_homeassistant.latest_version = body[ATTR_LAST_VERSION]

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -104,6 +106,7 @@ class APIHomeAssistant(CoreSysAttributes):

        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
            self.sys_homeassistant.api_password = None

        self.sys_homeassistant.save_data()

@@ -128,7 +131,7 @@ class APIHomeAssistant(CoreSysAttributes):
    async def update(self, request: web.Request) -> None:
        """Update Home Assistant."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
        version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)

        await asyncio.shield(self.sys_homeassistant.update(version))

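A side note on the schema above: vol.Inclusive ties ATTR_IMAGE and ATTR_LAST_VERSION into one "custom_hass" group, so a custom Home Assistant image can only be configured together with a pinned version. A minimal sketch of that validation behaviour, separate from the diff and with hypothetical values:

import voluptuous as vol

schema = vol.Schema({
    vol.Inclusive("image", "custom_hass"): str,
    vol.Inclusive("last_version", "custom_hass"): str,
})

schema({})                                              # OK: both keys omitted
schema({"image": "repo/ha", "last_version": "0.91.0"})  # OK: both keys present
schema({"image": "repo/ha"})                            # raises vol.MultipleInvalid: group incomplete
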
@@ -1,9 +1,20 @@
"""Init file for Hass.io info RESTful API."""
import logging
from typing import Any, Dict

from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
                     ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
                     ATTR_SUPPORTED_ARCH)
from aiohttp import web

from ..const import (
    ATTR_ARCH,
    ATTR_CHANNEL,
    ATTR_HASSOS,
    ATTR_HOMEASSISTANT,
    ATTR_HOSTNAME,
    ATTR_LOGGING,
    ATTR_MACHINE,
    ATTR_SUPERVISOR,
    ATTR_SUPPORTED_ARCH,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

@@ -14,7 +25,7 @@ class APIInfo(CoreSysAttributes):
    """Handle RESTful API for info functions."""

    @api_process
    async def info(self, request):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show system info."""
        return {
            ATTR_SUPERVISOR: self.sys_supervisor.version,
@@ -25,4 +36,5 @@ class APIInfo(CoreSysAttributes):
            ATTR_ARCH: self.sys_arch.default,
            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_LOGGING: self.sys_config.logging,
        }

258  hassio/api/ingress.py  Normal file
@@ -0,0 +1,258 @@
"""Hass.io Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging
from typing import Any, Dict, Union

import aiohttp
from aiohttp import hdrs, web
from aiohttp.web_exceptions import (
    HTTPBadGateway,
    HTTPServiceUnavailable,
    HTTPUnauthorized,
)
from multidict import CIMultiDict, istr

from ..addons.addon import Addon
from ..const import (
    ATTR_ADMIN,
    ATTR_ICON,
    ATTR_SESSION,
    ATTR_TITLE,
    ATTR_PANELS,
    ATTR_ENABLE,
    COOKIE_INGRESS,
    HEADER_TOKEN,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER = logging.getLogger(__name__)


class APIIngress(CoreSysAttributes):
    """Ingress view to handle add-on webui routing."""

    def _extract_addon(self, request: web.Request) -> Addon:
        """Return addon, throw an exception it it doesn't exist."""
        token = request.match_info.get("token")

        # Find correct add-on
        addon = self.sys_ingress.get(token)
        if not addon:
            _LOGGER.warning("Ingress for %s not available", token)
            raise HTTPServiceUnavailable()

        return addon

    def _check_ha_access(self, request: web.Request) -> None:
        if request[REQUEST_FROM] != self.sys_homeassistant:
            _LOGGER.warning("Ingress is only available behind Home Assistant")
            raise HTTPUnauthorized()

    def _create_url(self, addon: Addon, path: str) -> str:
        """Create URL to container."""
        return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"

    @api_process
    async def panels(self, request: web.Request) -> Dict[str, Any]:
        """Create a list of panel data."""
        addons = {}
        for addon in self.sys_ingress.addons:
            addons[addon.slug] = {
                ATTR_TITLE: addon.panel_title,
                ATTR_ICON: addon.panel_icon,
                ATTR_ADMIN: addon.panel_admin,
                ATTR_ENABLE: addon.ingress_panel,
            }

        return {ATTR_PANELS: addons}

    @api_process
    async def create_session(self, request: web.Request) -> Dict[str, Any]:
        """Create a new session."""
        self._check_ha_access(request)

        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
        """Route data to Hass.io ingress service."""
        self._check_ha_access(request)

        # Check Ingress Session
        session = request.cookies.get(COOKIE_INGRESS)
        if not self.sys_ingress.validate_session(session):
            _LOGGER.warning("No valid ingress session %s", session)
            raise HTTPUnauthorized()

        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
        try:
            # Websocket
            if _is_websocket(request):
                return await self._handle_websocket(request, addon, path)

            # Request
            return await self._handle_request(request, addon, path)

        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)

        raise HTTPBadGateway() from None

    async def _handle_websocket(
        self, request: web.Request, addon: Addon, path: str
    ) -> web.WebSocketResponse:
        """Ingress route for websocket."""
        if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
            req_protocols = [
                str(proto.strip())
                for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]
        else:
            req_protocols = ()

        ws_server = web.WebSocketResponse(
            protocols=req_protocols, autoclose=False, autoping=False
        )
        await ws_server.prepare(request)

        # Preparing
        url = self._create_url(addon, path)
        source_header = _init_header(request, addon)

        # Support GET query
        if request.query_string:
            url = "{}?{}".format(url, request.query_string)

        # Start proxy
        async with self.sys_websession.ws_connect(
            url,
            headers=source_header,
            protocols=req_protocols,
            autoclose=False,
            autoping=False,
        ) as ws_client:
            # Proxy requests
            await asyncio.wait(
                [
                    _websocket_forward(ws_server, ws_client),
                    _websocket_forward(ws_client, ws_server),
                ],
                return_when=asyncio.FIRST_COMPLETED,
            )

        return ws_server

    async def _handle_request(
        self, request: web.Request, addon: Addon, path: str
    ) -> Union[web.Response, web.StreamResponse]:
        """Ingress route for request."""
        url = self._create_url(addon, path)
        data = await request.read()
        source_header = _init_header(request, addon)

        async with self.sys_websession.request(
            request.method, url, headers=source_header, params=request.query, data=data
        ) as result:
            headers = _response_header(result)

            # Simple request
            if (
                hdrs.CONTENT_LENGTH in result.headers
                and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
            ):
                # Return Response
                body = await result.read()
                return web.Response(
                    headers=headers,
                    status=result.status,
                    content_type=result.content_type,
                    body=body,
                )

            # Stream response
            response = web.StreamResponse(status=result.status, headers=headers)
            response.content_type = result.content_type

            try:
                await response.prepare(request)
                async for data in result.content.iter_chunked(4096):
                    await response.write(data)

            except (aiohttp.ClientError, aiohttp.ClientPayloadError) as err:
                _LOGGER.error("Stream error with %s: %s", url, err)

            return response


def _init_header(
    request: web.Request, addon: str
) -> Union[CIMultiDict, Dict[str, str]]:
    """Create initial header."""
    headers = {}

    # filter flags
    for name, value in request.headers.items():
        if name in (hdrs.CONTENT_LENGTH, hdrs.CONTENT_ENCODING, istr(HEADER_TOKEN)):
            continue
        headers[name] = value

    # Update X-Forwarded-For
    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

    return headers


def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
    """Create response header."""
    headers = {}

    for name, value in response.headers.items():
        if name in (
            hdrs.TRANSFER_ENCODING,
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_TYPE,
            hdrs.CONTENT_ENCODING,
        ):
            continue
        headers[name] = value

    return headers


def _is_websocket(request: web.Request) -> bool:
    """Return True if request is a websocket."""
    headers = request.headers

    if (
        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
    ):
        return True
    return False


async def _websocket_forward(ws_from, ws_to):
    """Handle websocket message directly."""
    try:
        async for msg in ws_from:
            if msg.type == aiohttp.WSMsgType.TEXT:
                await ws_to.send_str(msg.data)
            elif msg.type == aiohttp.WSMsgType.BINARY:
                await ws_to.send_bytes(msg.data)
            elif msg.type == aiohttp.WSMsgType.PING:
                await ws_to.ping()
            elif msg.type == aiohttp.WSMsgType.PONG:
                await ws_to.pong()
            elif ws_to.closed:
                await ws_to.close(code=ws_to.close_code, message=msg.extra)
    except RuntimeError:
        _LOGGER.warning("Ingress Websocket runtime error")
File diff suppressed because one or more lines are too long
Binary file not shown.
1  hassio/api/panel/chunk.1b30ffdc501071af245c.js  Normal file
File diff suppressed because one or more lines are too long
BIN  hassio/api/panel/chunk.1b30ffdc501071af245c.js.gz  Normal file
Binary file not shown.
1  hassio/api/panel/chunk.3a63ad36bccf4ea567fa.js  Normal file
@@ -0,0 +1 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{102:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(121),i=t.n(e),o=t(123),u=t.n(o),a=i.a,c=u.a}}]);
BIN  hassio/api/panel/chunk.3a63ad36bccf4ea567fa.js.gz  Normal file
Binary file not shown.
3  hassio/api/panel/chunk.510634470d399e194ace.js  Normal file
File diff suppressed because one or more lines are too long
10  hassio/api/panel/chunk.510634470d399e194ace.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.510634470d399e194ace.js.gz  Normal file
Binary file not shown.
@@ -1 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.8038876231b1b1817795.js","sourceRoot":""}
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.510634470d399e194ace.js","sourceRoot":""}
3  hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js  Normal file
File diff suppressed because one or more lines are too long
21  hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.LICENSE  Normal file
@@ -0,0 +1,21 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.gz  Normal file
Binary file not shown.
@@ -1 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.d86ead4948c3bb8d56b2.js","sourceRoot":""}
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.564a2f7b1c38ddaa4ce0.js","sourceRoot":""}
1  hassio/api/panel/chunk.5d31a1778f717ac8b063.js  Normal file
File diff suppressed because one or more lines are too long
BIN  hassio/api/panel/chunk.5d31a1778f717ac8b063.js.gz  Normal file
Binary file not shown.
3  hassio/api/panel/chunk.659084fef4e3b7b66a76.js  Normal file
File diff suppressed because one or more lines are too long
@@ -8,6 +8,18 @@ Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
 * @fileoverview
 * @suppress {checkPrototypalTypes}
 * @license Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
 * This code may only be used under the BSD style license found at
 * http://polymer.github.io/LICENSE.txt The complete set of authors may be found
 * at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
 * be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
 * Google as part of the polymer project is also subject to an additional IP
 * rights grant found at http://polymer.github.io/PATENTS.txt
 */

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
BIN  hassio/api/panel/chunk.659084fef4e3b7b66a76.js.gz  Normal file
Binary file not shown.
1  hassio/api/panel/chunk.659084fef4e3b7b66a76.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.659084fef4e3b7b66a76.js","sourceRoot":""}
3  hassio/api/panel/chunk.6e9c87e51920a9c354e5.js  Normal file
File diff suppressed because one or more lines are too long
31  hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.LICENSE  Normal file
@@ -0,0 +1,31 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.gz  Normal file
Binary file not shown.
1  hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.6e9c87e51920a9c354e5.js","sourceRoot":""}
1  hassio/api/panel/chunk.739b67c99ab56cdbd75d.js  Normal file
File diff suppressed because one or more lines are too long
BIN  hassio/api/panel/chunk.739b67c99ab56cdbd75d.js.gz  Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
3  hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js  Normal file
File diff suppressed because one or more lines are too long
@@ -1,3 +1,28 @@
/**
 * @license
 * Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
 * This code may only be used under the BSD style license found at
 * http://polymer.github.io/LICENSE.txt
 * The complete set of authors may be found at
 * http://polymer.github.io/AUTHORS.txt
 * The complete set of contributors may be found at
 * http://polymer.github.io/CONTRIBUTORS.txt
 * Code distributed by Google as part of the polymer project is also
 * subject to an additional IP rights grant found at
 * http://polymer.github.io/PATENTS.txt
 */

/**
@license
Copyright (c) 2019 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
@@ -30,20 +55,6 @@ part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
 * @license
 * Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
 * This code may only be used under the BSD style license found at
 * http://polymer.github.io/LICENSE.txt
 * The complete set of authors may be found at
 * http://polymer.github.io/AUTHORS.txt
 * The complete set of contributors may be found at
 * http://polymer.github.io/CONTRIBUTORS.txt
 * Code distributed by Google as part of the polymer project is also
 * subject to an additional IP rights grant found at
 * http://polymer.github.io/PATENTS.txt
 */

/**
@license
Copyright 2018 Google Inc. All Rights Reserved.
@@ -122,17 +133,6 @@ and limitations under the License.
 * THE SOFTWARE.
 */

/**
@license
Copyright (c) 2019 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
 * @license
 * Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
@@ -147,17 +147,6 @@ found at http://polymer.github.io/PATENTS.txt
 * http://polymer.github.io/PATENTS.txt
 */

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
@@ -178,3 +167,14 @@ The complete set of contributors may be found at http://polymer.github.io/CONTRI
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.gz  Normal file
Binary file not shown.
1  hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.a7e5fb452cd1b3a5faef.js","sourceRoot":""}
1  hassio/api/panel/chunk.b3340b3df270d20af4a1.js  Normal file
File diff suppressed because one or more lines are too long
BIN  hassio/api/panel/chunk.b3340b3df270d20af4a1.js.gz  Normal file
Binary file not shown.
1  hassio/api/panel/chunk.b60fb48c5280275dd7e2.js  Normal file
File diff suppressed because one or more lines are too long
BIN  hassio/api/panel/chunk.b60fb48c5280275dd7e2.js.gz  Normal file
Binary file not shown.
@@ -1 +0,0 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{110:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(101),i=t.n(e),o=t(103),u=t.n(o),a=i.a,c=u.a}}]);
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
3  hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js  Normal file
File diff suppressed because one or more lines are too long
10  hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js.gz  Normal file
Binary file not shown.
1  hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.f15d7f41c0d302cbbc7a.js","sourceRoot":""}
@@ -1 +1 @@
!function(e){function t(t){for(var n,o,i=t[0],u=t[1],a=0,l=[];a<i.length;a++)o=i[a],r[o]&&l.push(r[o][0]),r[o]=0;for(n in u)Object.prototype.hasOwnProperty.call(u,n)&&(e[n]=u[n]);for(c&&c(t);l.length;)l.shift()()}var n={},r={1:0};function o(t){if(n[t])return n[t].exports;var r=n[t]={i:t,l:!1,exports:{}};return e[t].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var t=[],n=r[e];if(0!==n)if(n)t.push(n[2]);else{var i=new Promise(function(t,o){n=r[e]=[t,o]});t.push(n[2]=i);var u,a=document.createElement("script");a.charset="utf-8",a.timeout=120,o.nc&&a.setAttribute("nonce",o.nc),a.src=function(e){return o.p+"chunk."+{0:"d86ead4948c3bb8d56b2",2:"75766aa821239c9936dc",3:"7b2353341ba15ea393c7",4:"b74ddf4cacc7d5de8a55",5:"05bbfb49a092df0b4304",6:"8038876231b1b1817795",7:"088b1034e27d00ee9329"}[e]+".js"}(e),u=function(t){a.onerror=a.onload=null,clearTimeout(c);var n=r[e];if(0!==n){if(n){var o=t&&("load"===t.type?"missing":t.type),i=t&&t.target&&t.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,n[1](u)}r[e]=void 0}};var c=setTimeout(function(){u({type:"timeout",target:a})},12e4);a.onerror=a.onload=u,document.head.appendChild(a)}return Promise.all(t)},o.m=e,o.c=n,o.d=function(e,t,n){o.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,t){if(1&t&&(e=o(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(o.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var r in e)o.d(n,r,function(t){return e[t]}.bind(null,r));return n},o.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(t,"a",t),t},o.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=t,i=i.slice();for(var a=0;a<i.length;a++)t(i[a]);var c=u;o(o.s=0)}([function(e,t,n){window.loadES5Adapter().then(function(){Promise.all([n.e(0),n.e(2)]).then(n.bind(null,2)),Promise.all([n.e(0),n.e(6),n.e(3)]).then(n.bind(null,1))}),document.body.style.height="100%"}]);
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,u=[];c<a.length;c++)o=a[c],r[o]&&u.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(f&&f(n);u.length;)u.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"564a2f7b1c38ddaa4ce0",1:"659084fef4e3b7b66a76",2:"510634470d399e194ace",3:"f15d7f41c0d302cbbc7a",5:"5d31a1778f717ac8b063",6:"b60fb48c5280275dd7e2",7:"3a63ad36bccf4ea567fa",8:"a571dfa106202cc57af6",9:"a7e5fb452cd1b3a5faef",10:"b3340b3df270d20af4a1",11:"6e9c87e51920a9c354e5",12:"1b30ffdc501071af245c",13:"739b67c99ab56cdbd75d"}[e]+".js"}(e),i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var f=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(9),t.e(6)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -35,7 +35,7 @@ class APIProxy(CoreSysAttributes):
        elif not addon.access_homeassistant_api:
            _LOGGER.warning("Not permitted API access: %s", addon.slug)
        else:
            _LOGGER.info("%s access from %s", request.path, addon.slug)
            _LOGGER.debug("%s access from %s", request.path, addon.slug)
            return

        raise HTTPUnauthorized()

@@ -6,12 +6,19 @@ from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized, HTTPForbidden

from ..const import (
    HEADER_TOKEN, REQUEST_FROM, ROLE_ADMIN, ROLE_DEFAULT, ROLE_HOMEASSISTANT,
    ROLE_MANAGER, ROLE_BACKUP)
    HEADER_TOKEN,
    REQUEST_FROM,
    ROLE_ADMIN,
    ROLE_DEFAULT,
    ROLE_HOMEASSISTANT,
    ROLE_MANAGER,
    ROLE_BACKUP,
)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

# fmt: off

# Block Anytime
BLACKLIST = re.compile(
@@ -65,7 +72,7 @@ ADDONS_ROLE_ACCESS = {
        r"|/hardware/.+"
        r"|/hassos/.+"
        r"|/supervisor/.+"
        r"|/addons(?:/[^/]+/(?!security).+)?"
        r"|/addons(?:/[^/]+/(?!security).+|/reload)?"
        r"|/snapshots.*"
        r")$"
    ),
@@ -74,6 +81,8 @@ ADDONS_ROLE_ACCESS = {
    ),
}

# fmt: off


class SecurityMiddleware(CoreSysAttributes):
    """Security middleware functions."""
@@ -104,9 +113,7 @@ class SecurityMiddleware(CoreSysAttributes):
            raise HTTPUnauthorized()

        # Home-Assistant
        # UUID check need removed with 131
        if hassio_token in (self.sys_homeassistant.uuid,
                            self.sys_homeassistant.hassio_token):
        if hassio_token == self.sys_homeassistant.hassio_token:
            _LOGGER.debug("%s access from Home Assistant", request.path)
            request_from = self.sys_homeassistant
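The security middleware above gates every API request on the X-Hassio-Key header and a per-role path regex. A minimal, self-contained sketch of that pattern; the token table and role map here are illustrative stand-ins, not the Supervisor's own data:

import re

from aiohttp import web
from aiohttp.web_exceptions import HTTPForbidden, HTTPUnauthorized

HEADER_TOKEN = "X-Hassio-Key"

# Hypothetical lookup tables: token -> role, role -> allowed path pattern
TOKENS = {"secret-manager-token": "manager"}
ROLE_ACCESS = {"manager": re.compile(r"^(?:/addons.*|/snapshots.*)$")}


@web.middleware
async def check_permission(request: web.Request, handler):
    """Reject requests whose token or role does not match the path."""
    role = TOKENS.get(request.headers.get(HEADER_TOKEN))
    if role is None:
        raise HTTPUnauthorized()
    if not ROLE_ACCESS[role].match(request.path):
        raise HTTPForbidden()
    return await handler(request)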
@@ -1,34 +1,64 @@
"""Init file for Hass.io Supervisor RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import voluptuous as vol

from .utils import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
    ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
    ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
    ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
    ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
    ATTR_ADDONS,
    ATTR_ADDONS_REPOSITORIES,
    ATTR_ARCH,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CHANNEL,
    ATTR_CPU_PERCENT,
    ATTR_DEBUG,
    ATTR_DEBUG_BLOCK,
    ATTR_DESCRIPTON,
    ATTR_ICON,
    ATTR_INSTALLED,
    ATTR_IP_ADDRESS,
    ATTR_LAST_VERSION,
    ATTR_LOGGING,
    ATTR_LOGO,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_USAGE,
    ATTR_NAME,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_REPOSITORY,
    ATTR_SLUG,
    ATTR_STATE,
    ATTR_TIMEZONE,
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    CONTENT_TYPE_BINARY,
    HASSIO_VERSION,
)
from ..coresys import CoreSysAttributes
from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
from ..exceptions import APIError
from ..utils.validate import validate_timezone
from ..validate import CHANNELS, LOG_LEVEL, REPOSITORIES, WAIT_BOOT
from .utils import api_process, api_process_raw, api_validate

_LOGGER = logging.getLogger(__name__)

SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_CHANNEL): CHANNELS,
    vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
    vol.Optional(ATTR_TIMEZONE): validate_timezone,
    vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_CHANNEL): CHANNELS,
        vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
        vol.Optional(ATTR_TIMEZONE): validate_timezone,
        vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
        vol.Optional(ATTR_LOGGING): LOG_LEVEL,
        vol.Optional(ATTR_DEBUG): vol.Boolean(),
        vol.Optional(ATTR_DEBUG_BLOCK): vol.Boolean(),
    }
)

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APISupervisor(CoreSysAttributes):
@@ -40,36 +70,39 @@ class APISupervisor(CoreSysAttributes):
        return True

    @api_process
    async def info(self, request):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return host information."""
        list_addons = []
        for addon in self.sys_addons.list_addons:
            if addon.is_installed:
                list_addons.append({
        for addon in self.sys_addons.installed:
            list_addons.append(
                {
                    ATTR_NAME: addon.name,
                    ATTR_SLUG: addon.slug,
                    ATTR_DESCRIPTON: addon.description,
                    ATTR_STATE: await addon.state(),
                    ATTR_VERSION: addon.last_version,
                    ATTR_INSTALLED: addon.version_installed,
                    ATTR_VERSION: addon.latest_version,
                    ATTR_INSTALLED: addon.version,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                })
                }
            )

        return {
            ATTR_VERSION: HASSIO_VERSION,
            ATTR_LAST_VERSION: self.sys_updater.version_hassio,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_ARCH: self.sys_supervisor.arch,
            ATTR_IP_ADDRESS: str(self.sys_supervisor.ip_address),
            ATTR_WAIT_BOOT: self.sys_config.wait_boot,
            ATTR_TIMEZONE: self.sys_config.timezone,
            ATTR_LOGGING: self.sys_config.logging,
            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
        }

    @api_process
    async def options(self, request):
    async def options(self, request: web.Request) -> None:
        """Set Supervisor options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

@@ -82,20 +115,26 @@ class APISupervisor(CoreSysAttributes):
        if ATTR_WAIT_BOOT in body:
            self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_DEBUG in body:
            self.sys_config.debug = body[ATTR_DEBUG]

        if ATTR_DEBUG_BLOCK in body:
            self.sys_config.debug_block = body[ATTR_DEBUG_BLOCK]

        if ATTR_LOGGING in body:
            self.sys_config.logging = body[ATTR_LOGGING]

        if ATTR_ADDONS_REPOSITORIES in body:
            new = set(body[ATTR_ADDONS_REPOSITORIES])
            await asyncio.shield(self.sys_addons.load_repositories(new))
            await asyncio.shield(self.sys_store.update_repositories(new))

        self.sys_updater.save_data()
        self.sys_config.save_data()
        return True

    @api_process
    async def stats(self, request):
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_supervisor.stats()
        if not stats:
            raise APIError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
@@ -108,31 +147,21 @@ class APISupervisor(CoreSysAttributes):
        }

    @api_process
    async def update(self, request):
    async def update(self, request: web.Request) -> None:
        """Update Supervisor OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)

        if version == self.sys_supervisor.version:
            raise APIError("Version {} is already in use".format(version))

        return await asyncio.shield(self.sys_supervisor.update(version))
        await asyncio.shield(self.sys_supervisor.update(version))

    @api_process
    async def reload(self, request):
    def reload(self, request: web.Request) -> Awaitable[None]:
        """Reload add-ons, configuration, etc."""
        tasks = [
            self.sys_updater.reload(),
        ]
        results, _ = await asyncio.shield(asyncio.wait(tasks))

        for result in results:
            if result.exception() is not None:
                raise APIError("Some reload task fails!")

        return True
        return asyncio.shield(self.sys_updater.reload())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return supervisor Docker logs."""
        return self.sys_supervisor.logs()
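The reworked SCHEMA_OPTIONS above validates the request body, including the new logging, debug, and debug_block keys, before any option is applied. A short sketch of how such a voluptuous schema behaves; the vol.In value lists below are illustrative stand-ins for the Supervisor's CHANNELS and LOG_LEVEL validators:

import voluptuous as vol

SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional("channel"): vol.In(["stable", "beta", "dev"]),
        vol.Optional("logging"): vol.In(["debug", "info", "warning", "error"]),
        vol.Optional("debug"): vol.Boolean(),
    }
)

body = SCHEMA_OPTIONS({"channel": "beta", "debug": True})  # returns the validated dict
assert body["debug"] is True

try:
    SCHEMA_OPTIONS({"channel": "nightly"})  # not in the allowed list
except vol.Invalid as err:
    print(f"rejected: {err}")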
@@ -19,9 +19,11 @@ from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .ingress import Ingress
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .store import StoreManager
from .tasks import Tasks
from .updater import Updater

@@ -49,8 +51,10 @@ async def initialize_coresys():
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.ingress = Ingress(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.store = StoreManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)
@@ -65,14 +69,15 @@ async def initialize_coresys():
    return coresys


def initialize_system_data(coresys):
def initialize_system_data(coresys: CoreSys):
    """Set up the default configuration and create folders."""
    config = coresys.config

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info("Create Home Assistant configuration folder %s",
                     config.path_homeassistant)
        _LOGGER.info(
            "Create Home Assistant configuration folder %s", config.path_homeassistant
        )
        config.path_homeassistant.mkdir()

    # hassio ssl folder
@@ -82,18 +87,19 @@ def initialize_system_data(coresys):

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Hass.io Add-on data folder %s",
                     config.path_addons_data)
        _LOGGER.info("Create Hass.io Add-on data folder %s", config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info("Create Hass.io Add-on local repository folder %s",
                     config.path_addons_local)
        _LOGGER.info(
            "Create Hass.io Add-on local repository folder %s", config.path_addons_local
        )
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info("Create Hass.io Add-on git repositories folder %s",
                     config.path_addons_git)
        _LOGGER.info(
            "Create Hass.io Add-on git repositories folder %s", config.path_addons_git
        )
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
@@ -116,10 +122,11 @@ def initialize_system_data(coresys):
        _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
        config.path_apparmor.mkdir()

    return config
    # Update log level
    coresys.config.modify_log_level()


def migrate_system_env(coresys):
def migrate_system_env(coresys: CoreSys):
    """Cleanup some stuff after update."""
    config = coresys.config

@@ -154,7 +161,8 @@ def initialize_logging():
                "ERROR": "red",
                "CRITICAL": "red",
            },
        ))
        )
    )


def check_environment():
@@ -188,19 +196,29 @@ def check_environment():
def reg_signal(loop):
    """Register SIGTERM and SIGKILL to stop system."""
    try:
        loop.add_signal_handler(signal.SIGTERM,
                                lambda: loop.call_soon(loop.stop))
        loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(signal.SIGHUP,
                                lambda: loop.call_soon(loop.stop))
        loop.add_signal_handler(signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(signal.SIGINT,
                                lambda: loop.call_soon(loop.stop))
        loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")


def supervisor_debugger(coresys: CoreSys) -> None:
    """Setup debugger if needed."""
    if not coresys.config.debug or not coresys.dev:
        return
    import ptvsd

    _LOGGER.info("Initialize Hass.io debugger")

    ptvsd.enable_attach(address=('0.0.0.0', 33333), redirect_output=True)
    if coresys.config.debug_block:
        ptvsd.wait_for_attach()
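reg_signal above registers the same stop callback for SIGTERM, SIGHUP, and SIGINT, tolerating platforms that refuse signal handlers. A compact equivalent, assuming a POSIX platform (SIGHUP does not exist on Windows):

import asyncio
import signal


def reg_signal(loop: asyncio.AbstractEventLoop) -> None:
    """Stop the loop on SIGTERM/SIGHUP/SIGINT; skip signals the platform rejects."""
    for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
        try:
            loop.add_signal_handler(sig, lambda: loop.call_soon(loop.stop))
        except (ValueError, RuntimeError):
            print(f"Could not bind to {sig.name}")


loop = asyncio.new_event_loop()
reg_signal(loop)
loop.run_forever()  # a SIGINT (Ctrl+C) now stops the loop cleanly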
@@ -2,21 +2,29 @@
from datetime import datetime
import logging
import os
import re
from pathlib import Path, PurePath
import re

import pytz

from .const import (
    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
    ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
    ATTR_ADDONS_CUSTOM_LIST,
    ATTR_DEBUG,
    ATTR_DEBUG_BLOCK,
    ATTR_LAST_BOOT,
    ATTR_LOGGING,
    ATTR_TIMEZONE,
    ATTR_WAIT_BOOT,
    FILE_HASSIO_CONFIG,
    HASSIO_DATA,
)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG

_LOGGER = logging.getLogger(__name__)

HOMEASSISTANT_CONFIG = PurePath('homeassistant')
HOMEASSISTANT_CONFIG = PurePath("homeassistant")

HASSIO_SSL = PurePath("ssl")

@@ -45,7 +53,7 @@ class CoreConfig(JsonConfig):
    @property
    def timezone(self):
        """Return system timezone."""
        config_file = Path(self.path_homeassistant, 'configuration.yaml')
        config_file = Path(self.path_homeassistant, "configuration.yaml")
        try:
            assert config_file.exists()
            configuration = config_file.read_text()
@@ -53,7 +61,7 @@ class CoreConfig(JsonConfig):
            data = RE_TIMEZONE.search(configuration)
            assert data

            timezone = data.group('timezone')
            timezone = data.group("timezone")
            pytz.timezone(timezone)
        except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
            _LOGGER.debug("Can't parse Home Assistant timezone")
@@ -67,15 +75,51 @@ class CoreConfig(JsonConfig):
        self._data[ATTR_TIMEZONE] = value

    @property
    def wait_boot(self):
    def wait_boot(self) -> int:
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value):
    def wait_boot(self, value: int):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def debug(self) -> bool:
        """Return True if ptvsd is enabled."""
        return self._data[ATTR_DEBUG]

    @debug.setter
    def debug(self, value: bool):
        """Set debug mode."""
        self._data[ATTR_DEBUG] = value

    @property
    def debug_block(self) -> bool:
        """Return True if ptvsd should wait."""
        return self._data[ATTR_DEBUG_BLOCK]

    @debug_block.setter
    def debug_block(self, value: bool):
        """Set debug wait mode."""
        self._data[ATTR_DEBUG_BLOCK] = value

    @property
    def logging(self) -> str:
        """Return log level of system."""
        return self._data[ATTR_LOGGING]

    @logging.setter
    def logging(self, value: str):
        """Set system log level."""
        self._data[ATTR_LOGGING] = value
        self.modify_log_level()

    def modify_log_level(self) -> None:
        """Change log level."""
        lvl = getattr(logging, self.logging.upper())
        logging.basicConfig(level=lvl)

    @property
    def last_boot(self):
        """Return last boot datetime."""
@@ -99,7 +143,7 @@ class CoreConfig(JsonConfig):
    @property
    def path_extern_hassio(self):
        """Return Hass.io data path external for Docker."""
        return PurePath(os.environ['SUPERVISOR_SHARE'])
        return PurePath(os.environ["SUPERVISOR_SHARE"])

    @property
    def path_extern_homeassistant(self):
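modify_log_level above maps the stored level name onto the logging module via getattr, so an invalid name would raise AttributeError; the LOG_LEVEL validator used by the options schema presumably keeps that from happening. A minimal standalone sketch:

import logging


def modify_log_level(level_name: str) -> None:
    """Map a stored level name such as 'info' onto the root logger."""
    lvl = getattr(logging, level_name.upper())  # "debug" -> logging.DEBUG
    logging.basicConfig(level=lvl)


modify_log_level("debug")
logging.getLogger(__name__).debug("now visible")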
@@ -2,7 +2,8 @@
from pathlib import Path
from ipaddress import ip_network

HASSIO_VERSION = "152"

HASSIO_VERSION = "163"

URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
@@ -22,6 +23,7 @@ FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
FILE_HASSIO_INGRESS = Path(HASSIO_DATA, "ingress.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")

@@ -51,8 +53,9 @@ CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
HEADER_HA_ACCESS = "x-ha-access"
HEADER_TOKEN = "x-hassio-key"
HEADER_HA_ACCESS = "X-Ha-Access"
HEADER_TOKEN = "X-Hassio-Key"
COOKIE_INGRESS = "ingress_session"

ENV_TOKEN = "HASSIO_TOKEN"
ENV_TIME = "TZ"
@@ -64,6 +67,7 @@ ATTR_WAIT_BOOT = "wait_boot"
ATTR_DEPLOYMENT = "deployment"
ATTR_WATCHDOG = "watchdog"
ATTR_CHANGELOG = "changelog"
ATTR_LOGGING = "logging"
ATTR_DATE = "date"
ATTR_ARCH = "arch"
ATTR_LONG_DESCRIPTION = "long_description"
@@ -89,6 +93,7 @@ ATTR_DESCRIPTON = "description"
ATTR_STARTUP = "startup"
ATTR_BOOT = "boot"
ATTR_PORTS = "ports"
ATTR_PORTS_DESCRIPTION = "ports_description"
ATTR_PORT = "port"
ATTR_SSL = "ssl"
ATTR_MAP = "map"
@@ -119,6 +124,7 @@ ATTR_HOST_PID = "host_pid"
ATTR_HOST_IPC = "host_ipc"
ATTR_HOST_DBUS = "host_dbus"
ATTR_NETWORK = "network"
ATTR_NETWORK_DESCRIPTION = "network_description"
ATTR_TMPFS = "tmpfs"
ATTR_PRIVILEGED = "privileged"
ATTR_USER = "user"
@@ -187,6 +193,23 @@ ATTR_SUPERVISOR = "supervisor"
ATTR_AUTH_API = "auth_api"
ATTR_KERNEL_MODULES = "kernel_modules"
ATTR_SUPPORTED_ARCH = "supported_arch"
ATTR_INGRESS = "ingress"
ATTR_INGRESS_PORT = "ingress_port"
ATTR_INGRESS_ENTRY = "ingress_entry"
ATTR_INGRESS_TOKEN = "ingress_token"
ATTR_INGRESS_URL = "ingress_url"
ATTR_INGRESS_PANEL = "ingress_panel"
ATTR_PANEL_ICON = "panel_icon"
ATTR_PANEL_TITLE = "panel_title"
ATTR_PANEL_ADMIN = "panel_admin"
ATTR_TITLE = "title"
ATTR_ENABLE = "enable"
ATTR_IP_ADDRESS = "ip_address"
ATTR_SESSION = "session"
ATTR_ADMIN = "admin"
ATTR_PANELS = "panels"
ATTR_DEBUG = "debug"
ATTR_DEBUG_BLOCK = "debug_block"

PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"
@@ -44,6 +44,9 @@ class HassIO(CoreSysAttributes):
        # Load HassOS
        await self.sys_hassos.load()

        # Load Stores
        await self.sys_store.load()

        # Load Add-ons
        await self.sys_addons.load()

@@ -62,6 +65,9 @@ class HassIO(CoreSysAttributes):
        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

        # start dns forwarding
        self.sys_create_task(self.sys_dns.start())

@@ -131,6 +137,7 @@ class HassIO(CoreSysAttributes):
                    self.sys_dns.stop(),
                    self.sys_websession.close(),
                    self.sys_websession_ssl.close(),
                    self.sys_ingress.unload(),
                ]
            )
        except asyncio.TimeoutError:
@@ -23,9 +23,11 @@ if TYPE_CHECKING:
    from .hassos import HassOS
    from .homeassistant import HomeAssistant
    from .host import HostManager
    from .ingress import Ingress
    from .services import ServiceManager
    from .snapshots import SnapshotManager
    from .supervisor import Supervisor
    from .store import StoreManager
    from .tasks import Tasks
    from .updater import Updater

@@ -63,9 +65,11 @@ class CoreSys:
        self._snapshots: SnapshotManager = None
        self._tasks: Tasks = None
        self._host: HostManager = None
        self._ingress: Ingress = None
        self._dbus: DBusManager = None
        self._hassos: HassOS = None
        self._services: ServiceManager = None
        self._store: StoreManager = None
        self._discovery: Discovery = None

    @property
@@ -76,7 +80,7 @@ class CoreSys:
        return None

    @property
    def dev(self) -> str:
    def dev(self) -> bool:
        """Return True if we run dev mode."""
        return self._updater.channel == CHANNEL_DEV

@@ -221,6 +225,18 @@ class CoreSys:
            raise RuntimeError("AddonManager already set!")
        self._addons = value

    @property
    def store(self) -> StoreManager:
        """Return StoreManager object."""
        return self._store

    @store.setter
    def store(self, value: StoreManager):
        """Set a StoreManager object."""
        if self._store:
            raise RuntimeError("StoreManager already set!")
        self._store = value

    @property
    def snapshots(self) -> SnapshotManager:
        """Return SnapshotManager object."""
@@ -293,6 +309,18 @@ class CoreSys:
            raise RuntimeError("HostManager already set!")
        self._host = value

    @property
    def ingress(self) -> Ingress:
        """Return Ingress object."""
        return self._ingress

    @ingress.setter
    def ingress(self, value: Ingress):
        """Set an Ingress object."""
        if self._ingress:
            raise RuntimeError("Ingress already set!")
        self._ingress = value

    @property
    def hassos(self) -> HassOS:
        """Return HassOS object."""
@@ -411,6 +439,11 @@ class CoreSysAttributes:
        """Return AddonManager object."""
        return self.coresys.addons

    @property
    def sys_store(self) -> StoreManager:
        """Return StoreManager object."""
        return self.coresys.store

    @property
    def sys_snapshots(self) -> SnapshotManager:
        """Return SnapshotManager object."""
@@ -441,6 +474,11 @@ class CoreSysAttributes:
        """Return HostManager object."""
        return self.coresys.host

    @property
    def sys_ingress(self) -> Ingress:
        """Return Ingress object."""
        return self.coresys.ingress

    @property
    def sys_hassos(self) -> HassOS:
        """Return HassOS object."""
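Each CoreSys setter above refuses a second assignment, which turns the manager attributes into write-once slots. The same guard in isolation, with a hypothetical Registry class standing in for CoreSys:

class Registry:
    """Write-once attributes via property setters, as CoreSys does for its managers."""

    def __init__(self) -> None:
        self._store = None

    @property
    def store(self):
        """Return the stored manager."""
        return self._store

    @store.setter
    def store(self, value):
        """Set the manager exactly once."""
        if self._store:
            raise RuntimeError("StoreManager already set!")
        self._store = value


reg = Registry()
reg.store = object()  # first assignment succeeds
try:
    reg.store = object()  # second assignment raises
except RuntimeError as err:
    print(err)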
@@ -6,3 +6,5 @@ ATTR_PORT = "port"
ATTR_PROTOCOL = "protocol"
ATTR_SSL = "ssl"
ATTR_USERNAME = "username"
ATTR_API_KEY = "api_key"
ATTR_SERIAL = "serial"
@@ -3,9 +3,14 @@ import voluptuous as vol

from hassio.validate import NETWORK_PORT

from ..const import ATTR_HOST, ATTR_PORT
from ..const import ATTR_HOST, ATTR_PORT, ATTR_API_KEY, ATTR_SERIAL


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
    {
        vol.Required(ATTR_HOST): vol.Coerce(str),
        vol.Required(ATTR_PORT): NETWORK_PORT,
        vol.Required(ATTR_SERIAL): vol.Coerce(str),
        vol.Required(ATTR_API_KEY): vol.Coerce(str),
    }
)
@@ -1,12 +1,14 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
import logging
from contextlib import suppress
from typing import Any, Dict, Optional

import attr
import docker

from .network import DockerNetwork
from ..const import SOCKET_DOCKER
from ..exceptions import DockerAPIError
from .network import DockerNetwork

_LOGGER = logging.getLogger(__name__)

@@ -14,8 +16,9 @@ _LOGGER = logging.getLogger(__name__)
@attr.s(frozen=True)
class CommandReturn:
    """Return object from command run."""
    exit_code = attr.ib()
    output = attr.ib()

    exit_code: int = attr.ib()
    output: bytes = attr.ib()


class DockerAPI:
@@ -26,75 +29,87 @@ class DockerAPI:

    def __init__(self):
        """Initialize Docker base wrapper."""
        self.docker = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)),
            version='auto', timeout=900)
        self.network = DockerNetwork(self.docker)
        self.docker: docker.DockerClient = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version="auto", timeout=900
        )
        self.network: DockerNetwork = DockerNetwork(self.docker)

    @property
    def images(self):
    def images(self) -> docker.models.images.ImageCollection:
        """Return API images."""
        return self.docker.images

    @property
    def containers(self):
    def containers(self) -> docker.models.containers.ContainerCollection:
        """Return API containers."""
        return self.docker.containers

    @property
    def api(self):
    def api(self) -> docker.APIClient:
        """Return API containers."""
        return self.docker.api

    def run(self, image, **kwargs):
    def run(
        self, image: str, **kwargs: Dict[str, Any]
    ) -> docker.models.containers.Container:
        """Create a Docker container and run it.

        Need run inside executor.
        """
        name = kwargs.get('name', image)
        network_mode = kwargs.get('network_mode')
        hostname = kwargs.get('hostname')
        name = kwargs.get("name", image)
        network_mode = kwargs.get("network_mode")
        hostname = kwargs.get("hostname")

        # Setup network
        kwargs['dns_search'] = ["."]
        kwargs["dns_search"] = ["."]
        if network_mode:
            kwargs['dns'] = [str(self.network.supervisor)]
            kwargs['dns_opt'] = ["ndots:0"]
            kwargs["dns"] = [str(self.network.supervisor)]
            kwargs["dns_opt"] = ["ndots:0"]
        else:
            kwargs['network'] = None
            kwargs["network"] = None

        # Create container
        try:
            container = self.docker.containers.create(
                image, use_config_proxy=False, **kwargs)
                image, use_config_proxy=False, **kwargs
            )
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            return False
            raise DockerAPIError() from None

        # attach network
        # Attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            if self.network.attach_container(container, alias=alias):
                self.network.detach_default_bridge(container)
            else:
            try:
                self.network.attach_container(container, alias=alias)
            except DockerAPIError:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)
            else:
                with suppress(DockerAPIError):
                    self.network.detach_default_bridge(container)

        # run container
        # Run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            return False
            raise DockerAPIError() from None

        return True
        # Update metadata
        with suppress(docker.errors.DockerException):
            container.reload()

    def run_command(self, image, command=None, **kwargs):
        return container

    def run_command(
        self, image: str, command: Optional[str] = None, **kwargs: Dict[str, Any]
    ) -> CommandReturn:
        """Create a temporary container and run command.

        Need run inside executor.
        """
        stdout = kwargs.get('stdout', True)
        stderr = kwargs.get('stderr', True)
        stdout = kwargs.get("stdout", True)
        stderr = kwargs.get("stderr", True)

        _LOGGER.info("Run command '%s' on %s", command, image)
        try:
@@ -112,11 +127,11 @@ class DockerAPI:

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command: %s", err)
            return CommandReturn(None, b"")
            raise DockerAPIError() from None

        finally:
            # cleanup container
            with suppress(docker.errors.DockerException):
                container.remove(force=True)

        return CommandReturn(result.get('StatusCode'), output)
        return CommandReturn(result.get("StatusCode"), output)
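The DockerAPI refactor above replaces `return False` with `raise DockerAPIError() from None`, so callers either handle the typed error or wrap the call in contextlib.suppress. A self-contained sketch of that error-translation pattern; LowLevelError stands in for docker.errors.DockerException:

from contextlib import suppress


class DockerAPIError(Exception):
    """Marker exception, mirroring hassio.exceptions.DockerAPIError."""


class LowLevelError(Exception):
    """Stand-in for docker.errors.DockerException."""


def start_container() -> None:
    """Log the low-level failure, then raise the typed error without chaining."""
    try:
        raise LowLevelError("daemon unreachable")
    except LowLevelError as err:
        print(f"Can't start container: {err}")
        raise DockerAPIError() from None  # 'from None' drops the chained traceback


with suppress(DockerAPIError):  # callers that tolerate failure opt out explicitly
    start_container()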
@@ -1,15 +1,36 @@
"""Init file for Hass.io add-on Docker object."""
from __future__ import annotations

from contextlib import suppress
from ipaddress import IPv4Address, ip_address
import logging
import os
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Optional, Union, Awaitable

import docker
import requests

from .interface import DockerInterface
from ..addons.build import AddonBuild
from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
                     ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..const import (
    ENV_TIME,
    ENV_TOKEN,
    MAP_ADDONS,
    MAP_BACKUP,
    MAP_CONFIG,
    MAP_SHARE,
    MAP_SSL,
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)
from ..coresys import CoreSys
from ..exceptions import DockerAPIError
from ..utils import process_lock
from .interface import DockerInterface

if TYPE_CHECKING:
    from ..addons.addon import Addon


_LOGGER = logging.getLogger(__name__)

@@ -19,64 +40,72 @@ AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
class DockerAddon(DockerInterface):
    """Docker Hass.io wrapper for Home Assistant."""

    def __init__(self, coresys, slug):
    def __init__(self, coresys: CoreSys, addon: Addon):
        """Initialize Docker Home Assistant wrapper."""
        super().__init__(coresys)
        self._id = slug
        self.addon = addon

    @property
    def addon(self):
        """Return add-on of Docker image."""
        return self.sys_addons.get(self._id)

    @property
    def image(self):
    def image(self) -> str:
        """Return name of Docker image."""
        return self.addon.image

    @property
    def timeout(self):
    def ip_address(self) -> IPv4Address:
        """Return IP address of this container."""
        if self.addon.host_network:
            return self.sys_docker.network.gateway

        # Extract IP-Address
        try:
            return ip_address(
                self._meta["NetworkSettings"]["Networks"]["hassio"]["IPAddress"])
        except (KeyError, TypeError, ValueError):
            return ip_address("0.0.0.0")

    @property
    def timeout(self) -> int:
        """Return timeout for Docker actions."""
        return self.addon.timeout

    @property
    def version(self):
    def version(self) -> str:
        """Return version of Docker image."""
        if self.addon.legacy:
            return self.addon.version_installed
            return self.addon.version
        return super().version

    @property
    def arch(self):
    def arch(self) -> str:
        """Return arch of Docker image."""
        if self.addon.legacy:
            return self.sys_arch.default
        return super().arch

    @property
    def name(self):
    def name(self) -> str:
        """Return name of Docker container."""
        return "addon_{}".format(self.addon.slug)
        return f"addon_{self.addon.slug}"

    @property
    def ipc(self):
    def ipc(self) -> Optional[str]:
        """Return the IPC namespace."""
        if self.addon.host_ipc:
            return 'host'
            return "host"
        return None

    @property
    def full_access(self):
    def full_access(self) -> bool:
        """Return True if full access is enabled."""
        return not self.addon.protected and self.addon.with_full_access

    @property
    def hostname(self):
    def hostname(self) -> str:
        """Return slug/id of add-on."""
        return self.addon.slug.replace('_', '-')
        return self.addon.slug.replace("_", "-")

    @property
    def environment(self):
    def environment(self) -> Dict[str, str]:
        """Return environment for Docker add-on."""
        addon_env = self.addon.environment or {}

@@ -86,8 +115,7 @@ class DockerAddon(DockerInterface):
            if isinstance(value, (int, str)):
                addon_env[key] = value
            else:
                _LOGGER.warning(
                    "Can not set nested option %s as Docker env", key)
                _LOGGER.warning("Can not set nested option %s as Docker env", key)

        return {
            **addon_env,
@@ -96,9 +124,13 @@ class DockerAddon(DockerInterface):
        }

    @property
    def devices(self):
    def devices(self) -> List[str]:
        """Return needed devices."""
        devices = self.addon.devices or []
        devices = []

        # Extend add-on config
        if self.addon.devices:
            devices.extend(self.addon.devices)

        # Use audio devices
        if self.addon.with_audio and self.sys_hardware.support_audio:
@@ -113,9 +145,9 @@ class DockerAddon(DockerInterface):
        return devices or None

    @property
    def ports(self):
    def ports(self) -> Optional[Dict[str, Union[str, int, None]]]:
        """Filter None from add-on ports."""
        if not self.addon.ports:
        if self.addon.host_network or not self.addon.ports:
            return None

        return {
@@ -125,7 +157,7 @@ class DockerAddon(DockerInterface):
        }

    @property
    def security_opt(self):
    def security_opt(self) -> List[str]:
        """Controlling security options."""
        security = []

@@ -143,7 +175,7 @@ class DockerAddon(DockerInterface):
        return security

    @property
    def tmpfs(self):
    def tmpfs(self) -> Optional[Dict[str, str]]:
        """Return tmpfs for Docker add-on."""
        options = self.addon.tmpfs
        if options:
@@ -151,156 +183,148 @@ class DockerAddon(DockerInterface):
        return None

    @property
    def network_mapping(self):
    def network_mapping(self) -> Dict[str, str]:
        """Return hosts mapping."""
        return {
            'homeassistant': self.sys_docker.network.gateway,
            'hassio': self.sys_docker.network.supervisor,
            "homeassistant": self.sys_docker.network.gateway,
            "hassio": self.sys_docker.network.supervisor,
        }

    @property
    def network_mode(self):
    def network_mode(self) -> Optional[str]:
        """Return network mode for add-on."""
        if self.addon.host_network:
            return 'host'
            return "host"
        return None

    @property
    def pid_mode(self):
    def pid_mode(self) -> Optional[str]:
        """Return PID mode for add-on."""
        if not self.addon.protected and self.addon.host_pid:
            return 'host'
            return "host"
        return None

    @property
    def volumes(self):
    def volumes(self) -> Dict[str, Dict[str, str]]:
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': "/data",
                'mode': 'rw'
            }
        }
        volumes = {str(self.addon.path_extern_data): {"bind": "/data", "mode": "rw"}}

        addon_mapping = self.addon.map_volumes

        # setup config mappings
        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_homeassistant): {
                    'bind': "/config",
                    'mode': addon_mapping[MAP_CONFIG]
            volumes.update(
                {
                    str(self.sys_config.path_extern_homeassistant): {
                        "bind": "/config",
                        "mode": addon_mapping[MAP_CONFIG],
                    }
                }
            })
            )

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_ssl): {
                    'bind': "/ssl",
                    'mode': addon_mapping[MAP_SSL]
            volumes.update(
                {
                    str(self.sys_config.path_extern_ssl): {
                        "bind": "/ssl",
                        "mode": addon_mapping[MAP_SSL],
                    }
                }
            })
            )

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_addons_local): {
                    'bind': "/addons",
                    'mode': addon_mapping[MAP_ADDONS]
            volumes.update(
                {
                    str(self.sys_config.path_extern_addons_local): {
                        "bind": "/addons",
                        "mode": addon_mapping[MAP_ADDONS],
                    }
                }
            })
            )

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_backup): {
                    'bind': "/backup",
                    'mode': addon_mapping[MAP_BACKUP]
            volumes.update(
                {
                    str(self.sys_config.path_extern_backup): {
                        "bind": "/backup",
                        "mode": addon_mapping[MAP_BACKUP],
                    }
                }
            })
            )

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_share): {
                    'bind': "/share",
                    'mode': addon_mapping[MAP_SHARE]
            volumes.update(
                {
                    str(self.sys_config.path_extern_share): {
                        "bind": "/share",
                        "mode": addon_mapping[MAP_SHARE],
                    }
                }
            })
            )

        # Init other hardware mappings

        # GPIO support
        if self.addon.with_gpio and self.sys_hardware.support_gpio:
            for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
                volumes.update({
                    gpio_path: {
                        'bind': gpio_path,
                        'mode': 'rw'
                    },
                })
                volumes.update({gpio_path: {"bind": gpio_path, "mode": "rw"}})

        # DeviceTree support
        if self.addon.with_devicetree:
            volumes.update({
                "/sys/firmware/devicetree/base": {
                    'bind': "/device-tree",
                    'mode': 'ro'
                },
            })
            volumes.update(
                {
                    "/sys/firmware/devicetree/base": {
                        "bind": "/device-tree",
                        "mode": "ro",
                    }
                }
            )

        # Kernel Modules support
        if self.addon.with_kernel_modules:
            volumes.update({
                "/lib/modules": {
                    'bind': "/lib/modules",
                    'mode': 'ro'
                },
            })
            volumes.update({"/lib/modules": {"bind": "/lib/modules", "mode": "ro"}})

        # Docker API support
        if not self.addon.protected and self.addon.access_docker_api:
            volumes.update({
                "/var/run/docker.sock": {
                    'bind': "/var/run/docker.sock",
                    'mode': 'ro'
                },
            })
            volumes.update(
                {"/var/run/docker.sock": {"bind": "/var/run/docker.sock", "mode": "ro"}}
            )

        # Host D-Bus system
        if self.addon.host_dbus:
            volumes.update({
                "/var/run/dbus": {
                    'bind': "/var/run/dbus",
                    'mode': 'rw'
                }
            })
            volumes.update({"/var/run/dbus": {"bind": "/var/run/dbus", "mode": "rw"}})

        # ALSA configuration
        if self.addon.with_audio:
            volumes.update({
                str(self.addon.path_extern_asound): {
                    'bind': "/etc/asound.conf",
                    'mode': 'ro'
            volumes.update(
                {
                    str(self.addon.path_extern_asound): {
                        "bind": "/etc/asound.conf",
                        "mode": "ro",
                    }
                }
            })
            )

        return volumes

    def _run(self):
    def _run(self) -> None:
        """Run Docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True
            return

        # Security check
        if not self.addon.protected:
            _LOGGER.warning("%s run with disabled protected mode!",
                            self.addon.name)
            _LOGGER.warning("%s run with disabled protected mode!", self.addon.name)

        # cleanup
        self._stop()
        # Cleanup
        with suppress(DockerAPIError):
            self._stop()

        ret = self.sys_docker.run(
        # Create & Run container
        docker_container = self.sys_docker.run(
            self.image,
            name=self.name,
            hostname=self.hostname,
@@ -318,55 +342,53 @@ class DockerAddon(DockerInterface):
            security_opt=self.security_opt,
            environment=self.environment,
            volumes=self.volumes,
            tmpfs=self.tmpfs)
            tmpfs=self.tmpfs,
        )

        if ret:
            _LOGGER.info("Start Docker add-on %s with version %s", self.image,
                         self.version)
        _LOGGER.info("Start Docker add-on %s with version %s", self.image, self.version)
        self._meta = docker_container.attrs

        return ret

    def _install(self, tag, image=None):
    def _install(self, tag: str, image: Optional[str] = None) -> None:
        """Pull Docker image or build it.

        Need run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)
            self._build(tag)
        else:
            super()._install(tag, image)

        return super()._install(tag, image)

    def _build(self, tag):
    def _build(self, tag: str) -> None:
        """Build a Docker container.

        Need run inside executor.
        """
        build_env = AddonBuild(self.coresys, self._id)
        build_env = AddonBuild(self.coresys, self.addon)

        _LOGGER.info("Start build %s:%s", self.image, tag)
        try:
            image, log = self.sys_docker.images.build(
                use_config_proxy=False, **build_env.get_docker_args(tag))
                use_config_proxy=False, **build_env.get_docker_args(tag)
            )

            _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
            image.tag(self.image, tag='latest')
            image.tag(self.image, tag="latest")

            # Update meta data
            self._meta = image.attrs

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
            return False
            raise DockerAPIError() from None

        _LOGGER.info("Build %s:%s done", self.image, tag)
        return True

    @process_lock
    def export_image(self, path):
    def export_image(self, tar_file: Path) -> Awaitable[None]:
        """Export current images into a tar file."""
        return self.sys_run_in_executor(self._export_image, path)
        return self.sys_run_in_executor(self._export_image, tar_file)

    def _export_image(self, tar_file):
    def _export_image(self, tar_file: Path) -> None:
        """Export current images into a tar file.

        Need run inside executor.
@@ -375,7 +397,7 @@ class DockerAddon(DockerInterface):
            image = self.sys_docker.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s: %s", self.image, err)
            return False
            raise DockerAPIError() from None

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        try:
@@ -384,17 +406,16 @@ class DockerAddon(DockerInterface):
                write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s: %s", tar_file, err)
            return False
            raise DockerAPIError() from None

        _LOGGER.info("Export image %s done", self.image)
        return True

    @process_lock
    def import_image(self, path, tag):
    def import_image(self, tar_file: Path, tag: str) -> Awaitable[None]:
        """Import a tar file as image."""
        return self.sys_run_in_executor(self._import_image, path, tag)
        return self.sys_run_in_executor(self._import_image, tar_file, tag)

    def _import_image(self, tar_file, tag):
    def _import_image(self, tar_file: Path, tag: str) -> None:
        """Import a tar file as image.

        Need run inside executor.
@@ -403,37 +424,38 @@ class DockerAddon(DockerInterface):
            with tar_file.open("rb") as read_tar:
                self.sys_docker.api.load_image(read_tar, quiet=True)

            image = self.sys_docker.images.get(self.image)
            image.tag(self.image, tag=tag)
            docker_image = self.sys_docker.images.get(self.image)
            docker_image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s: %s", self.image, err)
            return False
            raise DockerAPIError() from None

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self._meta = image.attrs
        self._cleanup()
        return True
        self._meta = docker_image.attrs

        with suppress(DockerAPIError):
            self._cleanup()

    @process_lock
    def write_stdin(self, data):
    def write_stdin(self, data: bytes) -> Awaitable[None]:
        """Write to add-on stdin."""
        return self.sys_run_in_executor(self._write_stdin, data)

    def _write_stdin(self, data):
    def _write_stdin(self, data: bytes) -> None:
        """Write to add-on stdin.

        Need run inside executor.
        """
        if not self._is_running():
            return False
            raise DockerAPIError() from None

        try:
            # Load needed docker objects
            container = self.sys_docker.containers.get(self.name)
            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
            socket = container.attach_socket(params={"stdin": 1, "stream": 1})
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
            return False
            raise DockerAPIError() from None

        try:
            # Write to stdin
@@ -442,6 +464,4 @@ class DockerAddon(DockerInterface):
            socket.close()
        except OSError as err:
            _LOGGER.error("Can't write to %s stdin: %s", self.name, err)
            return False

        return True
            raise DockerAPIError() from None
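The volumes property above builds the nested mapping that docker-py expects for container mounts. A small illustration of that shape, with hypothetical host paths:

from pathlib import Path


def build_volumes(data_dir: Path, config_dir: Path, mode: str = "rw") -> dict:
    """Return {host_path: {"bind": container_path, "mode": ...}} as docker-py expects."""
    volumes = {str(data_dir): {"bind": "/data", "mode": "rw"}}
    volumes.update({str(config_dir): {"bind": "/config", "mode": mode}})
    return volumes


print(build_volumes(Path("/tmp/addon_data"), Path("/tmp/ha_config"), mode="ro"))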
@@ -1,10 +1,14 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
from ipaddress import IPv4Address
import logging
from typing import Awaitable

import docker

from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE
from ..const import ENV_TIME, ENV_TOKEN, LABEL_MACHINE
from ..exceptions import DockerAPIError
from .interface import CommandReturn, DockerInterface

_LOGGER = logging.getLogger(__name__)

@@ -31,6 +35,11 @@ class DockerHomeAssistant(DockerInterface):
        """Return name of Docker container."""
        return HASS_DOCKER_NAME

    @property
    def timeout(self) -> int:
        """Return timeout for Docker actions."""
        return 60

    @property
    def devices(self):
        """Create list of special device to map into Docker."""
@@ -39,18 +48,25 @@ class DockerHomeAssistant(DockerInterface):
            devices.append(f"{device}:{device}:rwm")
        return devices or None

    def _run(self):
    @property
    def ip_address(self) -> IPv4Address:
        """Return IP address of this container."""
        return self.sys_docker.network.gateway

    def _run(self) -> None:
        """Run Docker image.

        Need run inside executor.
        """
        if self._is_running():
            return False
            return

        # cleanup
        self._stop()
        # Cleanup
        with suppress(DockerAPIError):
            self._stop()

        ret = self.sys_docker.run(
        # Create & Run container
        docker_container = self.sys_docker.run(
            self.image,
            name=self.name,
            hostname=self.name,
@@ -77,14 +93,10 @@ class DockerHomeAssistant(DockerInterface):
            },
        )

        if ret:
            _LOGGER.info(
                "Start homeassistant %s with version %s", self.image, self.version
            )
        _LOGGER.info("Start homeassistant %s with version %s", self.image, self.version)
        self._meta = docker_container.attrs

        return ret

    def _execute_command(self, command):
    def _execute_command(self, command: str) -> CommandReturn:
        """Create a temporary container and run command.

        Need run inside executor.
@@ -112,11 +124,11 @@ class DockerHomeAssistant(DockerInterface):
            },
        )

    def is_initialize(self):
    def is_initialize(self) -> Awaitable[bool]:
        """Return True if Docker container exists."""
        return self.sys_run_in_executor(self._is_initialize)

    def _is_initialize(self):
    def _is_initialize(self) -> bool:
        """Return True if docker container exists.

        Need run inside executor.
@@ -2,13 +2,16 @@
import asyncio
from contextlib import suppress
import logging
from typing import Any, Dict, Optional, Awaitable

import docker

from ..const import LABEL_ARCH, LABEL_VERSION
from ..coresys import CoreSysAttributes
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DockerAPIError
from ..utils import process_lock
from .stats import DockerStats
from . import CommandReturn

_LOGGER = logging.getLogger(__name__)

@@ -16,84 +19,84 @@ _LOGGER = logging.getLogger(__name__)
class DockerInterface(CoreSysAttributes):
    """Docker Hass.io interface."""

    def __init__(self, coresys):
    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys = coresys
        self._meta = None
        self.lock = asyncio.Lock(loop=coresys.loop)
        self.coresys: CoreSys = coresys
        self._meta: Optional[Dict[str, Any]] = None
        self.lock: asyncio.Lock = asyncio.Lock(loop=coresys.loop)

    @property
    def timeout(self):
    def timeout(self) -> int:
        """Return timeout for Docker actions."""
        return 30

    @property
    def name(self):
    def name(self) -> Optional[str]:
        """Return name of Docker container."""
        return None

    @property
    def meta_config(self):
    def meta_config(self) -> Dict[str, Any]:
        """Return meta data of configuration for container/image."""
        if not self._meta:
            return {}
        return self._meta.get("Config", {})

    @property
    def meta_labels(self):
    def meta_labels(self) -> Dict[str, str]:
        """Return meta data of labels for container/image."""
        return self.meta_config.get("Labels") or {}

    @property
    def image(self):
    def image(self) -> Optional[str]:
        """Return name of Docker image."""
        return self.meta_config.get("Image")

    @property
    def version(self):
    def version(self) -> Optional[str]:
        """Return version of Docker image."""
        return self.meta_labels.get(LABEL_VERSION)

    @property
    def arch(self):
    def arch(self) -> Optional[str]:
        """Return arch of Docker image."""
        return self.meta_labels.get(LABEL_ARCH)

    @property
    def in_progress(self):
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
        return self.lock.locked()

    @process_lock
    def install(self, tag, image=None):
    def install(self, tag: str, image: Optional[str] = None):
        """Pull docker image."""
        return self.sys_run_in_executor(self._install, tag, image)

    def _install(self, tag, image=None):
    def _install(self, tag: str, image: Optional[str] = None) -> None:
        """Pull Docker image.

        Need run inside executor.
        """
        image = image or self.image
        image = image.partition(':')[0]  # remove potential tag

        try:
            _LOGGER.info("Pull image %s tag %s.", image, tag)
            docker_image = self.sys_docker.images.pull(f"{image}:{tag}")

            _LOGGER.info("Tag image %s with version %s as latest", image, tag)
            docker_image.tag(image, tag="latest")
            self._meta = docker_image.attrs
        except docker.errors.APIError as err:
            _LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
            return False
            raise DockerAPIError() from None
        else:
            self._meta = docker_image.attrs

        _LOGGER.info("Tag image %s with version %s as latest", image, tag)
        return True

    def exists(self):
    def exists(self) -> Awaitable[bool]:
        """Return True if Docker image exists in local repository."""
        return self.sys_run_in_executor(self._exists)

    def _exists(self):
    def _exists(self) -> bool:
        """Return True if Docker image exists in local repository.

        Need run inside executor.
@@ -106,14 +109,14 @@ class DockerInterface(CoreSysAttributes):

        return True

    def is_running(self):
    def is_running(self) -> Awaitable[bool]:
        """Return True if Docker is running.

        Return a Future.
        """
        return self.sys_run_in_executor(self._is_running)

    def _is_running(self):
    def _is_running(self) -> bool:
        """Return True if Docker is running.

        Need run inside executor.
@@ -139,7 +142,7 @@ class DockerInterface(CoreSysAttributes):
        """Attach to running Docker container."""
        return self.sys_run_in_executor(self._attach)

    def _attach(self):
    def _attach(self) -> None:
        """Attach to running docker container.

        Need run inside executor.
@@ -147,21 +150,21 @@ class DockerInterface(CoreSysAttributes):
        try:
            if self.image:
                self._meta = self.sys_docker.images.get(self.image).attrs
            else:
                self._meta = self.sys_docker.containers.get(self.name).attrs
            self._meta = self.sys_docker.containers.get(self.name).attrs
        except docker.errors.DockerException:
            return False
            pass

        _LOGGER.info("Attach to image %s with version %s", self.image, self.version)

        return True
        # Successful?
        if not self._meta:
            raise DockerAPIError() from None
        _LOGGER.info("Attach to %s with version %s", self.image, self.version)

    @process_lock
    def run(self):
    def run(self) -> Awaitable[None]:
        """Run Docker image."""
        return self.sys_run_in_executor(self._run)

    def _run(self):
    def _run(self) -> None:
        """Run Docker image.

        Need run inside executor.
@@ -169,11 +172,11 @@ class DockerInterface(CoreSysAttributes):
        raise NotImplementedError()

    @process_lock
    def stop(self, remove_container=True):
    def stop(self, remove_container=True) -> Awaitable[None]:
        """Stop/remove Docker container."""
        return self.sys_run_in_executor(self._stop, remove_container)

    def _stop(self, remove_container=True):
    def _stop(self, remove_container=True) -> None:
        """Stop/remove Docker container.

        Need run inside executor.
@@ -181,26 +184,24 @@ class DockerInterface(CoreSysAttributes):
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False
            raise DockerAPIError() from None

        if docker_container.status == "running":
            _LOGGER.info("Stop %s Docker application", self.image)
            _LOGGER.info("Stop %s application", self.name)
            with suppress(docker.errors.DockerException):
                docker_container.stop(timeout=self.timeout)

        if remove_container:
            with suppress(docker.errors.DockerException):
                _LOGGER.info("Clean %s Docker application", self.image)
                _LOGGER.info("Clean %s application", self.name)
                docker_container.remove(force=True)

        return True

    @process_lock
    def start(self):
    def start(self) -> Awaitable[None]:
        """Start Docker container."""
        return self.sys_run_in_executor(self._start)

    def _start(self):
    def _start(self) -> None:
        """Start docker container.

        Need run inside executor.
@@ -208,31 +209,30 @@ class DockerInterface(CoreSysAttributes):
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False
            raise DockerAPIError() from None

        _LOGGER.info("Start %s", self.image)
        try:
            docker_container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", self.image, err)
            return False

        return True
            raise DockerAPIError() from None

    @process_lock
    def remove(self):
    def remove(self) -> Awaitable[None]:
        """Remove Docker images."""
        return self.sys_run_in_executor(self._remove)

    def _remove(self):
    def _remove(self) -> None:
        """Remove docker images.

        Need run inside executor.
        """
        # Cleanup container
        self._stop()
        with suppress(DockerAPIError):
            self._stop()

        _LOGGER.info("Remove Docker %s with latest and %s", self.image, self.version)
        _LOGGER.info("Remove image %s with latest and %s", self.image, self.version)

        try:
            with suppress(docker.errors.ImageNotFound):
@@ -245,17 +245,16 @@ class DockerInterface(CoreSysAttributes):

        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
            return False
            raise DockerAPIError() from None

        self._meta = None
        return True

    @process_lock
    def update(self, tag, image=None):
    def update(self, tag: str, image: Optional[str] = None) -> Awaitable[None]:
        """Update a Docker image."""
        return self.sys_run_in_executor(self._update, tag, image)

    def _update(self, tag, image=None):
    def _update(self, tag: str, image: Optional[str] = None) -> None:
        """Update a docker image.

        Need run inside executor.
@@ -263,27 +262,27 @@ class DockerInterface(CoreSysAttributes):
        image = image or self.image

        _LOGGER.info(
            "Update Docker %s:%s to %s:%s", self.image, self.version, image, tag
            "Update image %s:%s to %s:%s", self.image, self.version, image, tag
        )

        # Update docker image
        if not self._install(tag, image):
            return False
        self._install(tag, image)

        # Stop container & cleanup
        self._stop()
        self._cleanup()
        with suppress(DockerAPIError):
            try:
                self._stop()
            finally:
                self._cleanup()

        return True

    def logs(self):
    def logs(self) -> Awaitable[bytes]:
        """Return Docker logs of container.

        Return a Future.
        """
        return self.sys_run_in_executor(self._logs)

    def _logs(self):
    def _logs(self) -> bytes:
        """Return Docker logs of container.

        Need run inside executor.
@@ -299,11 +298,11 @@ class DockerInterface(CoreSysAttributes):
        _LOGGER.warning("Can't grep logs from %s: %s", self.image, err)

    @process_lock
    def cleanup(self):
    def cleanup(self) -> Awaitable[None]:
        """Check if old version exists and cleanup."""
        return self.sys_run_in_executor(self._cleanup)

    def _cleanup(self):
    def _cleanup(self) -> None:
        """Check if old version exists and cleanup.

        Need run inside executor.
@@ -312,24 +311,22 @@ class DockerInterface(CoreSysAttributes):
            latest = self.sys_docker.images.get(self.image)
        except docker.errors.DockerException:
            _LOGGER.warning("Can't find %s for cleanup", self.image)
            return False
            raise DockerAPIError() from None

        for image in self.sys_docker.images.list(name=self.image):
            if latest.id == image.id:
                continue

            with suppress(docker.errors.DockerException):
                _LOGGER.info("Cleanup Docker images: %s", image.tags)
                _LOGGER.info("Cleanup images: %s", image.tags)
                self.sys_docker.images.remove(image.id, force=True)
|
||||
|
||||
return True
|
||||
|
||||
@process_lock
|
||||
def restart(self):
|
||||
def restart(self) -> Awaitable[None]:
|
||||
"""Restart docker container."""
|
||||
return self.sys_loop.run_in_executor(None, self._restart)
|
||||
|
||||
def _restart(self):
|
||||
def _restart(self) -> None:
|
||||
"""Restart docker container.
|
||||
|
||||
Need run inside executor.
|
||||
@@ -337,33 +334,32 @@ class DockerInterface(CoreSysAttributes):
|
||||
try:
|
||||
container = self.sys_docker.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return False
|
||||
raise DockerAPIError() from None
|
||||
|
||||
_LOGGER.info("Restart %s", self.image)
|
||||
try:
|
||||
container.restart(timeout=self.timeout)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't restart %s: %s", self.image, err)
|
||||
return False
|
||||
return True
|
||||
raise DockerAPIError() from None
|
||||
|
||||
@process_lock
|
||||
def execute_command(self, command):
|
||||
def execute_command(self, command: str) -> Awaitable[CommandReturn]:
|
||||
"""Create a temporary container and run command."""
|
||||
return self.sys_run_in_executor(self._execute_command, command)
|
||||
|
||||
def _execute_command(self, command):
|
||||
def _execute_command(self, command: str) -> CommandReturn:
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def stats(self):
|
||||
def stats(self) -> Awaitable[DockerStats]:
|
||||
"""Read and return stats from container."""
|
||||
return self.sys_run_in_executor(self._stats)
|
||||
|
||||
def _stats(self):
|
||||
def _stats(self) -> DockerStats:
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
@@ -371,23 +367,23 @@ class DockerInterface(CoreSysAttributes):
|
||||
try:
|
||||
docker_container = self.sys_docker.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return None
|
||||
raise DockerAPIError() from None
|
||||
|
||||
try:
|
||||
stats = docker_container.stats(stream=False)
|
||||
return DockerStats(stats)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't read stats from %s: %s", self.name, err)
|
||||
return None
|
||||
raise DockerAPIError() from None
|
||||
|
||||
def is_fails(self):
|
||||
def is_fails(self) -> Awaitable[bool]:
|
||||
"""Return True if Docker is failing state.
|
||||
|
||||
Return a Future.
|
||||
"""
|
||||
return self.sys_run_in_executor(self._is_fails)
|
||||
|
||||
def _is_fails(self):
|
||||
def _is_fails(self) -> bool:
|
||||
"""Return True if Docker is failing state.
|
||||
|
||||
Need run inside executor.
|
||||
|
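The interface above follows one pattern throughout: the public method returns the executor future for a blocking private helper, and the helper now signals failure by raising DockerAPIError instead of returning False. A minimal sketch of how a caller consumes this exception-based API (the wrapper function is hypothetical, not part of the diff):

from hassio.exceptions import DockerAPIError

async def safe_restart(instance) -> bool:
    """Restart a DockerInterface; report success as a bool again."""
    try:
        # restart() schedules _restart() in the executor; awaiting the
        # future re-raises any DockerAPIError from the helper.
        await instance.restart()
    except DockerAPIError:
        return False
    return True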
@@ -1,9 +1,12 @@
"""Internal network manager for Hass.io."""
+from ipaddress import IPv4Address
import logging
+from typing import List, Optional

import docker

-from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
+from ..const import DOCKER_NETWORK, DOCKER_NETWORK_MASK, DOCKER_NETWORK_RANGE
+from ..exceptions import DockerAPIError

_LOGGER = logging.getLogger(__name__)

@@ -14,32 +17,32 @@ class DockerNetwork:
    This class is not AsyncIO safe!
    """

-    def __init__(self, dock):
+    def __init__(self, docker_client: docker.DockerClient):
        """Initialize internal Hass.io network."""
-        self.docker = dock
-        self.network = self._get_network()
+        self.docker: docker.DockerClient = docker_client
+        self.network: docker.models.networks.Network = self._get_network()

    @property
-    def name(self):
+    def name(self) -> str:
        """Return name of network."""
        return DOCKER_NETWORK

    @property
-    def containers(self):
+    def containers(self) -> List[docker.models.containers.Container]:
        """Return of connected containers from network."""
        return self.network.containers

    @property
-    def gateway(self):
+    def gateway(self) -> IPv4Address:
        """Return gateway of the network."""
        return DOCKER_NETWORK_MASK[1]

    @property
-    def supervisor(self):
+    def supervisor(self) -> IPv4Address:
        """Return supervisor of the network."""
        return DOCKER_NETWORK_MASK[2]

-    def _get_network(self):
+    def _get_network(self) -> docker.models.networks.Network:
        """Get HassIO network."""
        try:
            return self.docker.networks.get(DOCKER_NETWORK)
@@ -49,18 +52,25 @@
        ipam_pool = docker.types.IPAMPool(
            subnet=str(DOCKER_NETWORK_MASK),
            gateway=str(self.gateway),
-            iprange=str(DOCKER_NETWORK_RANGE)
+            iprange=str(DOCKER_NETWORK_RANGE),
        )

        ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

        return self.docker.networks.create(
-            DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
-            enable_ipv6=False, options={
-                "com.docker.network.bridge.name": DOCKER_NETWORK,
-            })
+            DOCKER_NETWORK,
+            driver="bridge",
+            ipam=ipam_config,
+            enable_ipv6=False,
+            options={"com.docker.network.bridge.name": DOCKER_NETWORK},
+        )

-    def attach_container(self, container, alias=None, ipv4=None):
+    def attach_container(
+        self,
+        container: docker.models.containers.Container,
+        alias: Optional[List[str]] = None,
+        ipv4: Optional[IPv4Address] = None,
+    ) -> None:
        """Attach container to Hass.io network.

        Need run inside executor.
@@ -71,23 +81,24 @@
            self.network.connect(container, aliases=alias, ipv4_address=ipv4)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't link container to hassio-net: %s", err)
-            return False
+            raise DockerAPIError() from None

        self.network.reload()
-        return True

-    def detach_default_bridge(self, container):
+    def detach_default_bridge(
+        self, container: docker.models.containers.Container
+    ) -> None:
        """Detach default Docker bridge.

        Need run inside executor.
        """
        try:
-            default_network = self.docker.networks.get('bridge')
+            default_network = self.docker.networks.get("bridge")
            default_network.disconnect(container)

        except docker.errors.NotFound:
            return

        except docker.errors.APIError as err:
-            _LOGGER.warning(
-                "Can't disconnect container from default: %s", err)
+            _LOGGER.warning("Can't disconnect container from default: %s", err)
+            raise DockerAPIError() from None
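For reference, the network bootstrap in _get_network() maps onto the plain docker SDK like this (a standalone sketch; the network name and addresses are illustrative stand-ins for the hassio constants):

import docker

client = docker.from_env()

# Address pool for the bridge, mirroring the IPAMPool/IPAMConfig above.
ipam_pool = docker.types.IPAMPool(
    subnet="172.30.32.0/23",
    gateway="172.30.32.1",
    iprange="172.30.33.0/24",
)
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

network = client.networks.create(
    "hassio-demo",
    driver="bridge",
    ipam=ipam_config,
    enable_ipv6=False,
    options={"com.docker.network.bridge.name": "hassio-demo"},
)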
@@ -1,11 +1,13 @@
"""Init file for Hass.io Docker object."""
+from ipaddress import IPv4Address
import logging
import os

import docker

-from .interface import DockerInterface
from ..coresys import CoreSysAttributes
+from ..exceptions import DockerAPIError
+from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)

@@ -14,29 +16,36 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for Supervisor."""

    @property
-    def name(self):
+    def name(self) -> str:
        """Return name of Docker container."""
-        return os.environ['SUPERVISOR_NAME']
+        return os.environ["SUPERVISOR_NAME"]

-    def _attach(self):
+    @property
+    def ip_address(self) -> IPv4Address:
+        """Return IP address of this container."""
+        return self.sys_docker.network.supervisor
+
+    def _attach(self) -> None:
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
-            container = self.sys_docker.containers.get(self.name)
+            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
-            return False
+            raise DockerAPIError() from None

-        self._meta = container.attrs
-        _LOGGER.info("Attach to Supervisor %s with version %s",
-                     self.image, self.version)
+        self._meta = docker_container.attrs
+        _LOGGER.info(
+            "Attach to Supervisor %s with version %s", self.image, self.version
+        )

        # If already attach
-        if container in self.sys_docker.network.containers:
-            return True
+        if docker_container in self.sys_docker.network.containers:
+            return

        # Attach to network
-        return self.sys_docker.network.attach_container(
-            container, alias=['hassio'],
-            ipv4=self.sys_docker.network.supervisor)
+        _LOGGER.info("Connect Supervisor to Hass.io Network")
+        self.sys_docker.network.attach_container(
+            docker_container, alias=["hassio"], ipv4=self.sys_docker.network.supervisor
+        )
@@ -28,6 +28,17 @@ class HomeAssistantAuthError(HomeAssistantAPIError):
    """Home Assistant Auth API exception."""


+# Supervisor
+
+
+class SupervisorError(HassioError):
+    """Supervisor error."""
+
+
+class SupervisorUpdateError(SupervisorError):
+    """Supervisor update error."""
+
+
# HassOS


@@ -43,6 +54,17 @@ class HassOSNotSupportedError(HassioNotSupportedError):
    """Function not supported by HassOS."""


+# Addons
+
+
+class AddonsError(HassioError):
+    """Addons exception."""
+
+
+class AddonsNotSupportedError(HassioNotSupportedError):
+    """Addons don't support a function."""
+
+
# Arch


@@ -144,3 +166,10 @@ class AppArmorInvalidError(AppArmorError):

class JsonFileError(HassioError):
    """Invalid json file."""
+
+
+# docker/api
+
+
+class DockerAPIError(HassioError):
+    """Docker API error."""
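All of these classes derive from HassioError, so callers can either silence a non-critical Docker failure or translate it into a domain error, which is exactly what the surrounding files do. A minimal sketch of both idioms (function names are illustrative):

from contextlib import suppress

from hassio.exceptions import DockerAPIError, SupervisorError

def cleanup_quietly(instance):
    """Best effort: ignore a failed image cleanup."""
    with suppress(DockerAPIError):
        instance._cleanup()

def start_or_fail(instance):
    """Translate the low-level error; `from None` drops its traceback."""
    try:
        instance._start()
    except DockerAPIError:
        raise SupervisorError() from None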
@@ -1,15 +1,22 @@
"""HassOS support on supervisor."""
import asyncio
+from contextlib import suppress
import logging
from pathlib import Path
+from typing import Awaitable, Optional

import aiohttp
from cpe import CPE

-from .coresys import CoreSysAttributes
from .const import URL_HASSOS_OTA
+from .coresys import CoreSysAttributes, CoreSys
from .docker.hassos_cli import DockerHassOSCli
-from .exceptions import HassOSNotSupportedError, HassOSUpdateError, DBusError
+from .exceptions import (
+    DBusError,
+    HassOSNotSupportedError,
+    HassOSUpdateError,
+    DockerAPIError,
+)

_LOGGER = logging.getLogger(__name__)

@@ -17,61 +24,61 @@ _LOGGER = logging.getLogger(__name__)
class HassOS(CoreSysAttributes):
    """HassOS interface inside HassIO."""

-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys):
        """Initialize HassOS handler."""
-        self.coresys = coresys
-        self.instance = DockerHassOSCli(coresys)
-        self._available = False
-        self._version = None
-        self._board = None
+        self.coresys: CoreSys = coresys
+        self.instance: DockerHassOSCli = DockerHassOSCli(coresys)
+        self._available: bool = False
+        self._version: Optional[str] = None
+        self._board: Optional[str] = None

    @property
-    def available(self):
+    def available(self) -> bool:
        """Return True, if HassOS on host."""
        return self._available

    @property
-    def version(self):
+    def version(self) -> Optional[str]:
        """Return version of HassOS."""
        return self._version

    @property
-    def version_cli(self):
+    def version_cli(self) -> Optional[str]:
        """Return version of HassOS cli."""
        return self.instance.version

    @property
-    def version_latest(self):
+    def version_latest(self) -> str:
        """Return version of HassOS."""
        return self.sys_updater.version_hassos

    @property
-    def version_cli_latest(self):
+    def version_cli_latest(self) -> str:
        """Return version of HassOS."""
        return self.sys_updater.version_hassos_cli

    @property
-    def need_update(self):
+    def need_update(self) -> bool:
        """Return true if a HassOS update is available."""
        return self.version != self.version_latest

    @property
-    def need_cli_update(self):
+    def need_cli_update(self) -> bool:
        """Return true if a HassOS cli update is available."""
        return self.version_cli != self.version_cli_latest

    @property
-    def board(self):
+    def board(self) -> Optional[str]:
        """Return board name."""
        return self._board

-    def _check_host(self):
+    def _check_host(self) -> None:
        """Check if HassOS is available."""
        if not self.available:
            _LOGGER.error("No HassOS available")
            raise HassOSNotSupportedError()

-    async def _download_raucb(self, version):
+    async def _download_raucb(self, version: str) -> None:
        """Download rauc bundle (OTA) from github."""
        url = URL_HASSOS_OTA.format(version=version, board=self.board)
        raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")
@@ -83,9 +90,9 @@ class HassOS(CoreSysAttributes):
            raise HassOSUpdateError()

        # Download RAUCB file
-        with raucb.open('wb') as ota_file:
+        with raucb.open("wb") as ota_file:
            while True:
-                chunk = await request.content.read(1048576)
+                chunk = await request.content.read(1_048_576)
                if not chunk:
                    break
                ota_file.write(chunk)
@@ -101,7 +108,7 @@

        raise HassOSUpdateError()

-    async def load(self):
+    async def load(self) -> None:
        """Load HassOS data."""
        try:
            # Check needed host functions
@@ -111,7 +118,7 @@

            assert self.sys_host.info.cpe is not None
            cpe = CPE(self.sys_host.info.cpe)
-            assert cpe.get_product()[0] == 'hassos'
+            assert cpe.get_product()[0] == "hassos"
        except (AssertionError, NotImplementedError):
            _LOGGER.debug("Found no HassOS")
            return
@@ -122,9 +129,10 @@
        self._board = cpe.get_target_hardware()[0]

        _LOGGER.info("Detect HassOS %s on host system", self.version)
-        await self.instance.attach()
+        with suppress(DockerAPIError):
+            await self.instance.attach()

-    def config_sync(self):
+    def config_sync(self) -> Awaitable[None]:
        """Trigger a host config reload from usb.

        Return a coroutine.
@@ -132,9 +140,9 @@
        self._check_host()

        _LOGGER.info("Syncing configuration from USB with HassOS.")
-        return self.sys_host.services.restart('hassos-config.service')
+        return self.sys_host.services.restart("hassos-config.service")

-    async def update(self, version=None):
+    async def update(self, version: Optional[str] = None) -> None:
        """Update HassOS system."""
        version = version or self.version_latest

@@ -167,20 +175,19 @@

        # Update fails
        rauc_status = await self.sys_dbus.get_properties()
-        _LOGGER.error(
-            "HassOS update fails with: %s", rauc_status.get('LastError'))
+        _LOGGER.error("HassOS update fails with: %s", rauc_status.get("LastError"))
        raise HassOSUpdateError()

-    async def update_cli(self, version=None):
+    async def update_cli(self, version: Optional[str] = None) -> None:
        """Update local HassOS cli."""
        version = version or self.version_cli_latest

        if version == self.version_cli:
            _LOGGER.warning("Version %s is already installed for CLI", version)
            raise HassOSUpdateError()

-        if await self.instance.update(version):
-            return
-
-        _LOGGER.error("HassOS CLI update fails")
-        raise HassOSUpdateError()
+        try:
+            await self.instance.update(version)
+        except DockerAPIError:
+            _LOGGER.error("HassOS CLI update fails")
+            raise HassOSUpdateError() from None
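The OTA download above streams the RAUC bundle in 1 MiB chunks, so the image never has to fit in memory. The same loop as a self-contained aiohttp sketch (URL and target path are placeholders):

import aiohttp

async def download_ota(url: str, target: str) -> None:
    """Stream `url` to `target` in 1 MiB chunks."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as request:
            with open(target, "wb") as ota_file:
                while True:
                    chunk = await request.content.read(1_048_576)
                    if not chunk:
                        break
                    ota_file.write(chunk)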
@@ -7,9 +7,10 @@ import logging
import os
from pathlib import Path
import re
+import secrets
import socket
import time
-from typing import Any, AsyncContextManager, Coroutine, Dict, Optional
+from typing import Any, AsyncContextManager, Awaitable, Dict, Optional
from uuid import UUID

import aiohttp
@@ -33,13 +34,15 @@ from .const import (
)
from .coresys import CoreSys, CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
+from .docker.stats import DockerStats
from .exceptions import (
+    DockerAPIError,
    HomeAssistantAPIError,
    HomeAssistantAuthError,
    HomeAssistantError,
    HomeAssistantUpdateError,
)
-from .utils import convert_to_ascii, create_token, process_lock
+from .utils import convert_to_ascii, process_lock
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG
@@ -72,7 +75,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):

    async def load(self) -> None:
        """Prepare Home Assistant object."""
-        if await self.instance.attach():
+        with suppress(DockerAPIError):
+            await self.instance.attach()
            return

        _LOGGER.info("No Home Assistant Docker image %s found.", self.image)
@@ -94,9 +98,9 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        return self._error_state

    @property
-    def api_ip(self) -> IPv4Address:
+    def ip_address(self) -> IPv4Address:
        """Return IP of Home Assistant instance."""
-        return self.sys_docker.network.gateway
+        return self.instance.ip_address

    @property
    def api_port(self) -> int:
@@ -132,7 +136,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
    def api_url(self) -> str:
        """Return API url to Home Assistant."""
        return "{}://{}:{}".format('https' if self.api_ssl else 'http',
-                                   self.api_ip, self.api_port)
+                                   self.ip_address, self.api_port)

    @property
    def watchdog(self) -> bool:
@@ -160,14 +164,14 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        return self.instance.version

    @property
-    def last_version(self) -> str:
+    def latest_version(self) -> str:
        """Return last available version of Home Assistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.sys_updater.version_homeassistant

-    @last_version.setter
-    def last_version(self, value: str):
+    @latest_version.setter
+    def latest_version(self, value: str):
        """Set last available version of Home Assistant."""
        if value:
            self._data[ATTR_LAST_VERSION] = value
@@ -230,8 +234,9 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        """Install a landing page."""
        _LOGGER.info("Setup HomeAssistant landingpage")
        while True:
-            if await self.instance.install('landingpage'):
-                break
+            with suppress(DockerAPIError):
+                await self.instance.install('landingpage')
+                return
            _LOGGER.warning("Fails install landingpage, retry after 30sec")
            await asyncio.sleep(30)
@@ -241,12 +246,14 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        _LOGGER.info("Setup Home Assistant")
        while True:
            # read homeassistant tag and install it
-            if not self.last_version:
+            if not self.latest_version:
                await self.sys_updater.reload()

-            tag = self.last_version
-            if tag and await self.instance.install(tag):
-                break
+            tag = self.latest_version
+            if tag:
+                with suppress(DockerAPIError):
+                    await self.instance.install(tag)
+                    break
            _LOGGER.warning("Error on install Home Assistant. Retry in 30sec")
            await asyncio.sleep(30)
@@ -260,26 +267,30 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        except HomeAssistantError:
            _LOGGER.error("Can't start Home Assistant!")
        finally:
-            await self.instance.cleanup()
+            with suppress(DockerAPIError):
+                await self.instance.cleanup()

    @process_lock
    async def update(self, version=None) -> None:
        """Update HomeAssistant version."""
-        version = version or self.last_version
+        version = version or self.latest_version
        rollback = self.version if not self.error_state else None
        running = await self.instance.is_running()
        exists = await self.instance.exists()

        if exists and version == self.instance.version:
            _LOGGER.warning("Version %s is already installed", version)
-            return HomeAssistantUpdateError()
+            return

        # process an update
        async def _update(to_version):
            """Run Home Assistant update."""
            _LOGGER.info("Update Home Assistant to version %s", to_version)
-            if not await self.instance.update(to_version):
-                raise HomeAssistantUpdateError()
+            try:
+                await self.instance.update(to_version)
+            except DockerAPIError:
+                _LOGGER.warning("Update Home Assistant image fails")
+                raise HomeAssistantUpdateError() from None

            if running:
                await self._start()
@@ -304,70 +315,84 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
            return

        # Create new API token
-        self._data[ATTR_ACCESS_TOKEN] = create_token()
+        self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
        self.save_data()

-        if not await self.instance.run():
-            raise HomeAssistantError()
+        try:
+            await self.instance.run()
+        except DockerAPIError:
+            raise HomeAssistantError() from None
        await self._block_till_run()

    @process_lock
    async def start(self) -> None:
        """Run Home Assistant docker."""
-        if await self.instance.is_running():
-            await self.instance.restart()
-        elif await self.instance.is_initialize():
-            await self.instance.start()
-        else:
-            await self._start()
-            return
+        try:
+            if await self.instance.is_running():
+                await self.instance.restart()
+            elif await self.instance.is_initialize():
+                await self.instance.start()
+            else:
+                await self._start()
+                return

-        await self._block_till_run()
+            await self._block_till_run()
+        except DockerAPIError:
+            raise HomeAssistantError() from None

    @process_lock
-    def stop(self) -> Coroutine:
+    async def stop(self) -> None:
        """Stop Home Assistant Docker.

        Return a coroutine.
        """
-        return self.instance.stop(remove_container=False)
+        try:
+            return await self.instance.stop(remove_container=False)
+        except DockerAPIError:
+            raise HomeAssistantError() from None

    @process_lock
    async def restart(self) -> None:
        """Restart Home Assistant Docker."""
-        if not await self.instance.restart():
-            raise HomeAssistantError()
+        try:
+            await self.instance.restart()
+        except DockerAPIError:
+            raise HomeAssistantError() from None

        await self._block_till_run()

    @process_lock
    async def rebuild(self) -> None:
        """Rebuild Home Assistant Docker container."""
-        await self.instance.stop()
+        with suppress(DockerAPIError):
+            await self.instance.stop()
        await self._start()

-    def logs(self) -> Coroutine:
+    def logs(self) -> Awaitable[bytes]:
        """Get HomeAssistant docker logs.

        Return a coroutine.
        """
        return self.instance.logs()

-    def stats(self) -> Coroutine:
+    async def stats(self) -> DockerStats:
        """Return stats of Home Assistant.

        Return a coroutine.
        """
-        return self.instance.stats()
+        try:
+            return await self.instance.stats()
+        except DockerAPIError:
+            raise HomeAssistantError() from None

-    def is_running(self) -> Coroutine:
+    def is_running(self) -> Awaitable[bool]:
        """Return True if Docker container is running.

        Return a coroutine.
        """
        return self.instance.is_running()

-    def is_fails(self) -> Coroutine:
+    def is_fails(self) -> Awaitable[bool]:
        """Return True if a Docker container is fails state.

        Return a coroutine.
@@ -438,7 +463,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
            headers[hdrs.CONTENT_TYPE] = content_type

        # Set old API Password
-        if self.api_password:
+        if not self.refresh_token and self.api_password:
            headers[HEADER_HA_ACCESS] = self.api_password

        for _ in (1, 2):
@@ -485,7 +510,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        """Check if port is mapped."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
-            result = sock.connect_ex((str(self.api_ip), self.api_port))
+            result = sock.connect_ex((str(self.ip_address), self.api_port))
            sock.close()

            # Check if the port is available
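Note that secrets.token_hex replaces the home-grown create_token() helper: it draws from the OS CSPRNG and returns a hex string twice as long as the requested byte count. A quick demonstration:

import secrets

token = secrets.token_hex(56)  # 56 random bytes -> 112 hex characters
assert len(token) == 112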
137 hassio/ingress.py Normal file
@@ -0,0 +1,137 @@
"""Fetch last versions from webserver."""
from datetime import timedelta
import logging
import random
import secrets
from typing import Dict, List, Optional

from .addons.addon import Addon
from .const import ATTR_PORTS, ATTR_SESSION, FILE_HASSIO_INGRESS
from .coresys import CoreSys, CoreSysAttributes
from .utils.dt import utc_from_timestamp, utcnow
from .utils.json import JsonConfig
from .validate import SCHEMA_INGRESS_CONFIG

_LOGGER = logging.getLogger(__name__)


class Ingress(JsonConfig, CoreSysAttributes):
    """Fetch last versions from version.json."""

    def __init__(self, coresys: CoreSys):
        """Initialize updater."""
        super().__init__(FILE_HASSIO_INGRESS, SCHEMA_INGRESS_CONFIG)
        self.coresys: CoreSys = coresys
        self.tokens: Dict[str, str] = {}

    def get(self, token: str) -> Optional[Addon]:
        """Return addon they have this ingress token."""
        if token not in self.tokens:
            self._update_token_list()
        return self.sys_addons.get(self.tokens.get(token))

    @property
    def sessions(self) -> Dict[str, float]:
        """Return sessions."""
        return self._data[ATTR_SESSION]

    @property
    def ports(self) -> Dict[str, int]:
        """Return list of dynamic ports."""
        return self._data[ATTR_PORTS]

    @property
    def addons(self) -> List[Addon]:
        """Return list of ingress Add-ons."""
        addons = []
        for addon in self.sys_addons.installed:
            if not addon.with_ingress:
                continue
            addons.append(addon)
        return addons

    async def load(self) -> None:
        """Update internal data."""
        self._update_token_list()
        self._cleanup_sessions()

        _LOGGER.info("Load %d ingress session", len(self.sessions))

    async def reload(self) -> None:
        """Reload/Validate sessions."""
        self._cleanup_sessions()

    async def unload(self) -> None:
        """Shutdown sessions."""
        self.save_data()

    def _cleanup_sessions(self) -> None:
        """Remove not used sessions."""
        now = utcnow()

        sessions = {}
        for session, valid in self.sessions.items():
            valid_dt = utc_from_timestamp(valid)
            if valid_dt < now:
                continue

            # Is valid
            sessions[session] = valid

        # Write back
        self.sessions.clear()
        self.sessions.update(sessions)

    def _update_token_list(self) -> None:
        """Regenerate token <-> Add-on map."""
        self.tokens.clear()

        # Read all ingress token and build a map
        for addon in self.addons:
            self.tokens[addon.ingress_token] = addon.slug

    def create_session(self) -> str:
        """Create new session."""
        session = secrets.token_hex(64)
        valid = utcnow() + timedelta(minutes=15)

        self.sessions[session] = valid.timestamp()
        self.save_data()

        return session

    def validate_session(self, session: str) -> bool:
        """Return True if session valid and make it longer valid."""
        if session not in self.sessions:
            return False
        valid_until = utc_from_timestamp(self.sessions[session])

        # Is still valid?
        if valid_until < utcnow():
            return False

        # Update time
        valid_until = valid_until + timedelta(minutes=15)
        self.sessions[session] = valid_until.timestamp()

        return True

    def get_dynamic_port(self, addon_slug: str) -> int:
        """Get/Create a dynamic port from range."""
        if addon_slug in self.ports:
            return self.ports[addon_slug]
        port = random.randint(62000, 65500)

        # Save port for next time
        self.ports[addon_slug] = port
        self.save_data()
        return port

    async def update_hass_panel(self, addon: Addon):
        """Return True if Home Assistant up and running."""
        method = "post" if addon.ingress_panel else "delete"
        async with self.sys_homeassistant.make_request(
            method, f"api/hassio_push/panel/{addon.slug}"
        ) as resp:
            if resp.status in (200, 201):
                _LOGGER.info("Update Ingress as panel for %s", addon.slug)
            else:
                _LOGGER.warning(
                    "Fails Ingress panel for %s with %i", addon.slug, resp.status
                )
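The expected session flow for the new Ingress manager, as a sketch (assuming `ingress` is a loaded Ingress instance): a token is minted with a 15-minute lifetime, and every successful validation slides the window forward another 15 minutes.

session = ingress.create_session()        # 64 random bytes, hex-encoded

if ingress.validate_session(session):     # also extends the lifetime
    print("session still valid")

addon = ingress.get("<ingress token>")    # ingress token -> Addon lookup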
@@ -70,7 +70,7 @@ class Hardware:
    def audio_devices(self):
        """Return all available audio interfaces."""
        if not ASOUND_CARDS.exists():
-            _LOGGER.info("No audio devices found")
+            _LOGGER.debug("No audio devices found")
            return {}

        try:
Some files were not shown because too many files have changed in this diff.